mirror of https://github.com/stashapp/stash.git
[Files Refactor] Import export fixup (#2763)
* Adjust json schema
* Remove mappings file from export
* Import file/folder support
* Update documentation
* Make gallery filenames unique
parent 1222b7b87b
commit 0b534d89c6
@@ -0,0 +1,259 @@
+package manager
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"path/filepath"
+	"time"
+
+	"github.com/stashapp/stash/pkg/file"
+	"github.com/stashapp/stash/pkg/models/jsonschema"
+)
+
+// HACK: this is all here because of an import loop in jsonschema -> models -> file
+
+var errZipFileNotExist = errors.New("zip file does not exist")
+
+type fileFolderImporter struct {
+	ReaderWriter file.Store
+	FolderStore  file.FolderStore
+	Input        jsonschema.DirEntry
+
+	file   file.File
+	folder *file.Folder
+}
+
+func (i *fileFolderImporter) PreImport(ctx context.Context) error {
+	var err error
+
+	switch ff := i.Input.(type) {
+	case *jsonschema.BaseDirEntry:
+		i.folder, err = i.folderJSONToFolder(ctx, ff)
+	default:
+		i.file, err = i.fileJSONToFile(ctx, i.Input)
+	}
+
+	return err
+}
+
+func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) {
+	path := filepath.FromSlash(baseJSON.Path)
+
+	ret := file.Folder{
+		DirEntry: file.DirEntry{
+			ModTime: baseJSON.ModTime.GetTime(),
+		},
+		Path:      path,
+		CreatedAt: baseJSON.CreatedAt.GetTime(),
+		UpdatedAt: baseJSON.CreatedAt.GetTime(),
+	}
+
+	if err := i.populateZipFileID(ctx, &ret.DirEntry); err != nil {
+		return nil, err
+	}
+
+	// set parent folder id during the creation process
+
+	return &ret, nil
+}
+
+func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (file.File, error) {
+	switch ff := fileJSON.(type) {
+	case *jsonschema.VideoFile:
+		baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile)
+		if err != nil {
+			return nil, err
+		}
+		return &file.VideoFile{
+			BaseFile:         baseFile,
+			Format:           ff.Format,
+			Width:            ff.Width,
+			Height:           ff.Height,
+			Duration:         ff.Duration,
+			VideoCodec:       ff.VideoCodec,
+			AudioCodec:       ff.AudioCodec,
+			FrameRate:        ff.FrameRate,
+			BitRate:          ff.BitRate,
+			Interactive:      ff.Interactive,
+			InteractiveSpeed: ff.InteractiveSpeed,
+		}, nil
+	case *jsonschema.ImageFile:
+		baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile)
+		if err != nil {
+			return nil, err
+		}
+		return &file.ImageFile{
+			BaseFile: baseFile,
+			Format:   ff.Format,
+			Width:    ff.Width,
+			Height:   ff.Height,
+		}, nil
+	case *jsonschema.BaseFile:
+		return i.baseFileJSONToBaseFile(ctx, ff)
+	}
+
+	return nil, fmt.Errorf("unknown file type")
+}
+
+func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) {
+	path := filepath.FromSlash(baseJSON.Path)
+
+	baseFile := file.BaseFile{
+		DirEntry: file.DirEntry{
+			ModTime: baseJSON.ModTime.GetTime(),
+		},
+		Basename:  filepath.Base(path),
+		Size:      baseJSON.Size,
+		CreatedAt: baseJSON.CreatedAt.GetTime(),
+		UpdatedAt: baseJSON.CreatedAt.GetTime(),
+	}
+
+	for _, fp := range baseJSON.Fingerprints {
+		baseFile.Fingerprints = append(baseFile.Fingerprints, file.Fingerprint{
+			Type:        fp.Type,
+			Fingerprint: fp.Fingerprint,
+		})
+	}
+
+	if err := i.populateZipFileID(ctx, &baseFile.DirEntry); err != nil {
+		return nil, err
+	}
+
+	return &baseFile, nil
+}
+
+func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirEntry) error {
+	zipFilePath := filepath.FromSlash(i.Input.DirEntry().ZipFile)
+	if zipFilePath != "" {
+		zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath)
+		if err != nil {
+			return fmt.Errorf("error finding file by path %q: %v", zipFilePath, err)
+		}
+
+		if zf == nil {
+			return errZipFileNotExist
+		}
+
+		id := zf.Base().ID
+		f.ZipFileID = &id
+	}
+
+	return nil
+}
+
+func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error {
+	return nil
+}
+
+func (i *fileFolderImporter) Name() string {
+	return filepath.FromSlash(i.Input.DirEntry().Path)
+}
+
+func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) {
+	path := filepath.FromSlash(i.Input.DirEntry().Path)
+	existing, err := i.ReaderWriter.FindByPath(ctx, path)
+	if err != nil {
+		return nil, err
+	}
+
+	if existing != nil {
+		id := int(existing.Base().ID)
+		return &id, nil
+	}
+
+	return nil, nil
+}
+
+func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string) (*file.Folder, error) {
+	parentPath := filepath.Dir(p)
+
+	if parentPath == "." || parentPath == string(filepath.Separator) {
+		// get or create this folder
+		return i.getOrCreateFolder(ctx, p, nil)
+	}
+
+	parent, err := i.createFolderHierarchy(ctx, parentPath)
+	if err != nil {
+		return nil, err
+	}
+
+	return i.getOrCreateFolder(ctx, p, parent)
+}
+
+func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, parent *file.Folder) (*file.Folder, error) {
+	folder, err := i.FolderStore.FindByPath(ctx, path)
+	if err != nil {
+		return nil, err
+	}
+
+	if folder != nil {
+		return folder, nil
+	}
+
+	now := time.Now()
+
+	folder = &file.Folder{
+		Path:      path,
+		CreatedAt: now,
+		UpdatedAt: now,
+	}
+
+	if parent != nil {
+		folder.ZipFileID = parent.ZipFileID
+		folder.ParentFolderID = &parent.ID
+	}
+
+	if err := i.FolderStore.Create(ctx, folder); err != nil {
+		return nil, err
+	}
+
+	return folder, nil
+}
+
+func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) {
+	// create folder hierarchy and set parent folder id
+	path := filepath.FromSlash(i.Input.DirEntry().Path)
+	path = filepath.Dir(path)
+	folder, err := i.createFolderHierarchy(ctx, path)
+	if err != nil {
+		return nil, fmt.Errorf("creating folder hierarchy for %q: %w", path, err)
+	}
+
+	if i.folder != nil {
+		return i.createFolder(ctx, folder)
+	}
+
+	return i.createFile(ctx, folder)
+}
+
+func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file.Folder) (*int, error) {
+	if parentFolder != nil {
+		i.file.Base().ParentFolderID = parentFolder.ID
+	}
+
+	if err := i.ReaderWriter.Create(ctx, i.file); err != nil {
+		return nil, fmt.Errorf("error creating file: %w", err)
+	}
+
+	id := int(i.file.Base().ID)
+	return &id, nil
+}
+
+func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *file.Folder) (*int, error) {
+	if parentFolder != nil {
+		i.folder.ParentFolderID = &parentFolder.ID
+	}
+
+	if err := i.FolderStore.Create(ctx, i.folder); err != nil {
+		return nil, fmt.Errorf("error creating folder: %w", err)
+	}
+
+	id := int(i.folder.ID)
+	return &id, nil
+}
+
+func (i *fileFolderImporter) Update(ctx context.Context, id int) error {
+	// update not supported
+	return nil
+}
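
The recursion in createFolderHierarchy above is the core of the new folder support: it walks up the path with filepath.Dir until it reaches the root, then creates or reuses each folder on the way back down, so a parent row always exists before its children. Below is a minimal, self-contained sketch of the same idea, using a toy in-memory store in place of file.FolderStore — the names here are illustrative, not from the repository:

package main

import (
	"fmt"
	"path/filepath"
)

// toyStore maps a folder path to a fake row ID, standing in for a database.
type toyStore map[string]int

// ensure mirrors getOrCreateFolder: reuse an existing folder, else create it.
func (s toyStore) ensure(path string) int {
	if id, ok := s[path]; ok {
		return id
	}
	id := len(s) + 1
	s[path] = id
	fmt.Printf("created folder %q with id %d\n", path, id)
	return id
}

// ensureHierarchy mirrors createFolderHierarchy: recurse to the parent first,
// stopping at "." or the root separator, so parents exist before children.
func (s toyStore) ensureHierarchy(p string) int {
	parent := filepath.Dir(p)
	if parent != "." && parent != string(filepath.Separator) && parent != p {
		s.ensureHierarchy(parent)
	}
	return s.ensure(p)
}

func main() {
	s := toyStore{}
	// creates /library, then /library/galleries, then the leaf, in that order
	s.ensureHierarchy("/library/galleries/holiday")
}
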
@@ -1,6 +1,8 @@
 package manager
 
 import (
+	"path/filepath"
+
 	"github.com/stashapp/stash/pkg/models/jsonschema"
 	"github.com/stashapp/stash/pkg/models/paths"
 )

@@ -9,14 +11,6 @@ type jsonUtils struct {
 	json paths.JSONPaths
 }
 
-func (jp *jsonUtils) getMappings() (*jsonschema.Mappings, error) {
-	return jsonschema.LoadMappingsFile(jp.json.MappingsFile)
-}
-
-func (jp *jsonUtils) saveMappings(mappings *jsonschema.Mappings) error {
-	return jsonschema.SaveMappingsFile(jp.json.MappingsFile, mappings)
-}
-
 func (jp *jsonUtils) getScraped() ([]jsonschema.ScrapedItem, error) {
 	return jsonschema.LoadScrapedFile(jp.json.ScrapedFile)
 }

@@ -25,58 +19,34 @@ func (jp *jsonUtils) saveScaped(scraped []jsonschema.ScrapedItem) error {
 	return jsonschema.SaveScrapedFile(jp.json.ScrapedFile, scraped)
 }
 
-func (jp *jsonUtils) getPerformer(checksum string) (*jsonschema.Performer, error) {
-	return jsonschema.LoadPerformerFile(jp.json.PerformerJSONPath(checksum))
+func (jp *jsonUtils) savePerformer(fn string, performer *jsonschema.Performer) error {
+	return jsonschema.SavePerformerFile(filepath.Join(jp.json.Performers, fn), performer)
 }
 
-func (jp *jsonUtils) savePerformer(checksum string, performer *jsonschema.Performer) error {
-	return jsonschema.SavePerformerFile(jp.json.PerformerJSONPath(checksum), performer)
+func (jp *jsonUtils) saveStudio(fn string, studio *jsonschema.Studio) error {
+	return jsonschema.SaveStudioFile(filepath.Join(jp.json.Studios, fn), studio)
 }
 
-func (jp *jsonUtils) getStudio(checksum string) (*jsonschema.Studio, error) {
-	return jsonschema.LoadStudioFile(jp.json.StudioJSONPath(checksum))
+func (jp *jsonUtils) saveTag(fn string, tag *jsonschema.Tag) error {
	return jsonschema.SaveTagFile(filepath.Join(jp.json.Tags, fn), tag)
 }
 
-func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) error {
-	return jsonschema.SaveStudioFile(jp.json.StudioJSONPath(checksum), studio)
+func (jp *jsonUtils) saveMovie(fn string, movie *jsonschema.Movie) error {
+	return jsonschema.SaveMovieFile(filepath.Join(jp.json.Movies, fn), movie)
 }
 
-func (jp *jsonUtils) getTag(checksum string) (*jsonschema.Tag, error) {
-	return jsonschema.LoadTagFile(jp.json.TagJSONPath(checksum))
+func (jp *jsonUtils) saveScene(fn string, scene *jsonschema.Scene) error {
+	return jsonschema.SaveSceneFile(filepath.Join(jp.json.Scenes, fn), scene)
 }
 
-func (jp *jsonUtils) saveTag(checksum string, tag *jsonschema.Tag) error {
-	return jsonschema.SaveTagFile(jp.json.TagJSONPath(checksum), tag)
+func (jp *jsonUtils) saveImage(fn string, image *jsonschema.Image) error {
+	return jsonschema.SaveImageFile(filepath.Join(jp.json.Images, fn), image)
 }
 
-func (jp *jsonUtils) getMovie(checksum string) (*jsonschema.Movie, error) {
-	return jsonschema.LoadMovieFile(jp.json.MovieJSONPath(checksum))
+func (jp *jsonUtils) saveGallery(fn string, gallery *jsonschema.Gallery) error {
+	return jsonschema.SaveGalleryFile(filepath.Join(jp.json.Galleries, fn), gallery)
 }
 
-func (jp *jsonUtils) saveMovie(checksum string, movie *jsonschema.Movie) error {
-	return jsonschema.SaveMovieFile(jp.json.MovieJSONPath(checksum), movie)
-}
-
-func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
-	return jsonschema.LoadSceneFile(jp.json.SceneJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveScene(checksum string, scene *jsonschema.Scene) error {
-	return jsonschema.SaveSceneFile(jp.json.SceneJSONPath(checksum), scene)
-}
-
-func (jp *jsonUtils) getImage(checksum string) (*jsonschema.Image, error) {
-	return jsonschema.LoadImageFile(jp.json.ImageJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveImage(checksum string, image *jsonschema.Image) error {
-	return jsonschema.SaveImageFile(jp.json.ImageJSONPath(checksum), image)
-}
-
-func (jp *jsonUtils) getGallery(checksum string) (*jsonschema.Gallery, error) {
-	return jsonschema.LoadGalleryFile(jp.json.GalleryJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveGallery(checksum string, gallery *jsonschema.Gallery) error {
-	return jsonschema.SaveGalleryFile(jp.json.GalleryJSONPath(checksum), gallery)
+func (jp *jsonUtils) saveFile(fn string, file jsonschema.DirEntry) error {
+	return jsonschema.SaveFileFile(filepath.Join(jp.json.Files, fn), file)
 }
@@ -20,6 +20,7 @@ type ImageReaderWriter interface {
 type GalleryReaderWriter interface {
-	models.GalleryReaderWriter
+	gallery.FinderCreatorUpdater
+	gallery.Finder
 }
 
 type SceneReaderWriter interface {
@@ -8,13 +8,14 @@ import (
 	"os"
 	"path/filepath"
 	"runtime"
+	"strconv"
 	"sync"
 	"time"
 
 	"github.com/stashapp/stash/internal/manager/config"
+	"github.com/stashapp/stash/pkg/file"
 	"github.com/stashapp/stash/pkg/fsutil"
 	"github.com/stashapp/stash/pkg/gallery"
-	"github.com/stashapp/stash/pkg/hash/md5"
 	"github.com/stashapp/stash/pkg/image"
 	"github.com/stashapp/stash/pkg/logger"
 	"github.com/stashapp/stash/pkg/models"

@@ -38,7 +39,6 @@ type ExportTask struct {
 	baseDir string
 	json    jsonUtils
 
-	Mappings            *jsonschema.Mappings
 	fileNamingAlgorithm models.HashAlgorithm
 
 	scenes *exportSpec

@@ -118,8 +118,6 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) {
 	// @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count + Movie.count
 	workerCount := runtime.GOMAXPROCS(0) // set worker count to number of cpus available
 
-	t.Mappings = &jsonschema.Mappings{}
-
 	startTime := time.Now()
 
 	if t.full {

@@ -140,10 +138,16 @@
 		}()
 	}
 
+	if t.baseDir == "" {
+		logger.Errorf("baseDir must not be empty")
+		return
+	}
+
 	t.json = jsonUtils{
 		json: *paths.GetJSONPaths(t.baseDir),
 	}
 
+	paths.EmptyJSONDirs(t.baseDir)
 	paths.EnsureJSONDirs(t.baseDir)
 
 	txnErr := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {

@@ -180,10 +184,6 @@
 		logger.Warnf("error while running export transaction: %v", txnErr)
 	}
 
-	if err := t.json.saveMappings(t.Mappings); err != nil {
-		logger.Errorf("[mappings] failed to save json: %s", err.Error())
-	}
-
 	if !t.full {
 		err := t.generateDownload()
 		if err != nil {

@@ -226,12 +226,6 @@ func (t *ExportTask) zipFiles(w io.Writer) error {
 		json: *paths.GetJSONPaths(""),
 	}
 
-	// write the mappings file
-	err := t.zipFile(t.json.json.MappingsFile, "", z)
-	if err != nil {
-		return err
-	}
-
 	walkWarn(t.json.json.Tags, t.zipWalkFunc(u.json.Tags, z))
 	walkWarn(t.json.json.Galleries, t.zipWalkFunc(u.json.Galleries, z))
 	walkWarn(t.json.json.Performers, t.zipWalkFunc(u.json.Performers, z))

@@ -380,7 +374,6 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Reposit
 		if (i % 100) == 0 { // make progress easier to read
 			logger.Progressf("[scenes] %d of %d", index, len(scenes))
 		}
-		t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path(), Checksum: scene.GetHash(t.fileNamingAlgorithm)})
 		jobCh <- scene // feed workers
 	}
 

@@ -390,6 +383,96 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Reposit
 	logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
 }
 
+func exportFile(f file.File, t *ExportTask) {
+	newFileJSON := fileToJSON(f)
+
+	fn := newFileJSON.Filename()
+
+	if err := t.json.saveFile(fn, newFileJSON); err != nil {
+		logger.Errorf("[files] <%s> failed to save json: %s", fn, err.Error())
+	}
+}
+
+func fileToJSON(f file.File) jsonschema.DirEntry {
+	bf := f.Base()
+
+	base := jsonschema.BaseFile{
+		BaseDirEntry: jsonschema.BaseDirEntry{
+			Type:      jsonschema.DirEntryTypeFile,
+			ModTime:   json.JSONTime{Time: bf.ModTime},
+			Path:      filepath.ToSlash(bf.Path),
+			CreatedAt: json.JSONTime{Time: bf.CreatedAt},
+			UpdatedAt: json.JSONTime{Time: bf.UpdatedAt},
+		},
+		Size: bf.Size,
+	}
+
+	if bf.ZipFile != nil {
+		base.ZipFile = bf.ZipFile.Base().Path
+	}
+
+	for _, fp := range bf.Fingerprints {
+		base.Fingerprints = append(base.Fingerprints, jsonschema.Fingerprint{
+			Type:        fp.Type,
+			Fingerprint: fp.Fingerprint,
+		})
+	}
+
+	switch ff := f.(type) {
+	case *file.VideoFile:
+		base.Type = jsonschema.DirEntryTypeVideo
+		return jsonschema.VideoFile{
+			BaseFile:         &base,
+			Format:           ff.Format,
+			Width:            ff.Width,
+			Height:           ff.Height,
+			Duration:         ff.Duration,
+			VideoCodec:       ff.VideoCodec,
+			AudioCodec:       ff.AudioCodec,
+			FrameRate:        ff.FrameRate,
+			BitRate:          ff.BitRate,
+			Interactive:      ff.Interactive,
+			InteractiveSpeed: ff.InteractiveSpeed,
+		}
+	case *file.ImageFile:
+		base.Type = jsonschema.DirEntryTypeImage
+		return jsonschema.ImageFile{
+			BaseFile: &base,
+			Format:   ff.Format,
+			Width:    ff.Width,
+			Height:   ff.Height,
+		}
+	}
+
+	return &base
+}
+
+func exportFolder(f file.Folder, t *ExportTask) {
+	newFileJSON := folderToJSON(f)
+
+	fn := newFileJSON.Filename()
+
+	if err := t.json.saveFile(fn, newFileJSON); err != nil {
+		logger.Errorf("[files] <%s> failed to save json: %s", fn, err.Error())
+	}
+}
+
+func folderToJSON(f file.Folder) jsonschema.DirEntry {
+	base := jsonschema.BaseDirEntry{
+		Type:      jsonschema.DirEntryTypeFolder,
+		ModTime:   json.JSONTime{Time: f.ModTime},
+		Path:      filepath.ToSlash(f.Path),
+		CreatedAt: json.JSONTime{Time: f.CreatedAt},
+		UpdatedAt: json.JSONTime{Time: f.UpdatedAt},
+	}
+
+	if f.ZipFile != nil {
+		base.ZipFile = f.ZipFile.Base().Path
+	}
+
+	return &base
+}
+
 func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo Repository, t *ExportTask) {
 	defer wg.Done()
 	sceneReader := repo.Scene

@@ -413,6 +496,11 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			continue
 		}
 
+		// export files
+		for _, f := range s.Files {
+			exportFile(f, t)
+		}
+
 		newSceneJSON.Studio, err = scene.GetStudioName(ctx, studioReader, s)
 		if err != nil {
 			logger.Errorf("[scenes] <%s> error getting scene studio name: %s", sceneHash, err.Error())

@@ -425,7 +513,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			continue
 		}
 
-		newSceneJSON.Galleries = gallery.GetChecksums(galleries)
+		newSceneJSON.Galleries = gallery.GetRefs(galleries)
 
 		performers, err := performerReader.FindBySceneID(ctx, s.ID)
 		if err != nil {

@@ -477,12 +565,17 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
 		}
 
-		sceneJSON, err := t.json.getScene(sceneHash)
-		if err == nil && jsonschema.CompareJSON(*sceneJSON, *newSceneJSON) {
-			continue
+		pf := s.PrimaryFile()
+		basename := ""
+		hash := ""
+		if pf != nil {
+			basename = pf.Basename
+			hash = s.OSHash()
 		}
 
-		if err := t.json.saveScene(sceneHash, newSceneJSON); err != nil {
+		fn := newSceneJSON.Filename(basename, hash)
+
+		if err := t.json.saveScene(fn, newSceneJSON); err != nil {
 			logger.Errorf("[scenes] <%s> failed to save json: %s", sceneHash, err.Error())
 		}
 	}

@@ -522,7 +615,6 @@ func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo Reposit
 		if (i % 100) == 0 { // make progress easier to read
 			logger.Progressf("[images] %d of %d", index, len(images))
 		}
-		t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path(), Checksum: image.Checksum()})
 		jobCh <- image // feed workers
 	}
 

@@ -544,6 +636,11 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 
 		newImageJSON := image.ToBasicJSON(s)
 
+		// export files
+		for _, f := range s.Files {
+			exportFile(f, t)
+		}
+
 		var err error
 		newImageJSON.Studio, err = image.GetStudioName(ctx, studioReader, s)
 		if err != nil {

@@ -557,7 +654,7 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			continue
 		}
 
-		newImageJSON.Galleries = t.getGalleryChecksums(imageGalleries)
+		newImageJSON.Galleries = gallery.GetRefs(imageGalleries)
 
 		performers, err := performerReader.FindByImageID(ctx, s.ID)
 		if err != nil {

@@ -585,24 +682,22 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
 		}
 
-		imageJSON, err := t.json.getImage(imageHash)
-		if err == nil && jsonschema.CompareJSON(*imageJSON, *newImageJSON) {
-			continue
+		pf := s.PrimaryFile()
+		basename := ""
+		hash := ""
+		if pf != nil {
+			basename = pf.Basename
+			hash = s.Checksum()
 		}
 
-		if err := t.json.saveImage(imageHash, newImageJSON); err != nil {
+		fn := newImageJSON.Filename(basename, hash)
+
+		if err := t.json.saveImage(fn, newImageJSON); err != nil {
 			logger.Errorf("[images] <%s> failed to save json: %s", imageHash, err.Error())
 		}
 	}
 }
 
-func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []string) {
-	for _, g := range galleries {
-		ret = append(ret, g.Checksum())
-	}
-	return
-}
-
 func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo Repository) {
 	var galleriesWg sync.WaitGroup
 

@@ -638,14 +733,6 @@ func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo Repo
 			logger.Progressf("[galleries] %d of %d", index, len(galleries))
 		}
 
-		title := gallery.Title
-		path := gallery.Path()
-
-		t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathNameMapping{
-			Path:     path,
-			Name:     title,
-			Checksum: gallery.Checksum(),
-		})
 		jobCh <- gallery
 	}
 

@@ -670,6 +757,27 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
 			continue
 		}
 
+		// export files
+		for _, f := range g.Files {
+			exportFile(f, t)
+		}
+
+		// export folder if necessary
+		if g.FolderID != nil {
+			folder, err := repo.Folder.Find(ctx, *g.FolderID)
+			if err != nil {
+				logger.Errorf("[galleries] <%s> error getting gallery folder: %v", galleryHash, err)
+				continue
+			}
+
+			if folder == nil {
+				logger.Errorf("[galleries] <%s> unable to find gallery folder", galleryHash)
+				continue
+			}
+
+			exportFolder(*folder, t)
+		}
+
 		newGalleryJSON.Studio, err = gallery.GetStudioName(ctx, studioReader, g)
 		if err != nil {
 			logger.Errorf("[galleries] <%s> error getting gallery studio name: %s", galleryHash, err.Error())

@@ -701,12 +809,23 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
 			t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
 		}
 
-		galleryJSON, err := t.json.getGallery(galleryHash)
-		if err == nil && jsonschema.CompareJSON(*galleryJSON, *newGalleryJSON) {
-			continue
+		pf := g.PrimaryFile()
+		basename := ""
+		// use id in case multiple galleries with the same basename
+		hash := strconv.Itoa(g.ID)
+
+		switch {
+		case pf != nil:
+			basename = pf.Base().Basename
+		case g.FolderPath != "":
+			basename = filepath.Base(g.FolderPath)
+		default:
+			basename = g.Title
 		}
 
-		if err := t.json.saveGallery(galleryHash, newGalleryJSON); err != nil {
+		fn := newGalleryJSON.Filename(basename, hash)
+
+		if err := t.json.saveGallery(fn, newGalleryJSON); err != nil {
 			logger.Errorf("[galleries] <%s> failed to save json: %s", galleryHash, err.Error())
 		}
 	}

@@ -742,7 +861,6 @@ func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo Rep
 		index := i + 1
 		logger.Progressf("[performers] %d of %d", index, len(performers))
 
-		t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.PathNameMapping{Name: performer.Name.String, Checksum: performer.Checksum})
 		jobCh <- performer // feed workers
 	}
 

@@ -777,14 +895,9 @@ func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jo
 			t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags))
 		}
 
-		performerJSON, err := t.json.getPerformer(p.Checksum)
-		if err != nil {
-			logger.Debugf("[performers] error reading performer json: %s", err.Error())
-		} else if jsonschema.CompareJSON(*performerJSON, *newPerformerJSON) {
-			continue
-		}
+		fn := newPerformerJSON.Filename()
 
-		if err := t.json.savePerformer(p.Checksum, newPerformerJSON); err != nil {
+		if err := t.json.savePerformer(fn, newPerformerJSON); err != nil {
 			logger.Errorf("[performers] <%s> failed to save json: %s", p.Checksum, err.Error())
 		}
 	}

@@ -821,7 +934,6 @@ func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo Reposi
 		index := i + 1
 		logger.Progressf("[studios] %d of %d", index, len(studios))
 
-		t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.PathNameMapping{Name: studio.Name.String, Checksum: studio.Checksum})
 		jobCh <- studio // feed workers
 	}
 

@@ -844,12 +956,9 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh
 			continue
 		}
 
-		studioJSON, err := t.json.getStudio(s.Checksum)
-		if err == nil && jsonschema.CompareJSON(*studioJSON, *newStudioJSON) {
-			continue
-		}
+		fn := newStudioJSON.Filename()
 
-		if err := t.json.saveStudio(s.Checksum, newStudioJSON); err != nil {
+		if err := t.json.saveStudio(fn, newStudioJSON); err != nil {
 			logger.Errorf("[studios] <%s> failed to save json: %s", s.Checksum, err.Error())
 		}
 	}

@@ -886,10 +995,6 @@ func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo Repositor
 		index := i + 1
 		logger.Progressf("[tags] %d of %d", index, len(tags))
 
-		// generate checksum on the fly by name, since we don't store it
-		checksum := md5.FromString(tag.Name)
-
-		t.Mappings.Tags = append(t.Mappings.Tags, jsonschema.PathNameMapping{Name: tag.Name, Checksum: checksum})
 		jobCh <- tag // feed workers
 	}
 

@@ -912,16 +1017,10 @@ func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan
 			continue
 		}
 
-		// generate checksum on the fly by name, since we don't store it
-		checksum := md5.FromString(thisTag.Name)
+		fn := newTagJSON.Filename()
 
-		tagJSON, err := t.json.getTag(checksum)
-		if err == nil && jsonschema.CompareJSON(*tagJSON, *newTagJSON) {
-			continue
-		}
-
-		if err := t.json.saveTag(checksum, newTagJSON); err != nil {
-			logger.Errorf("[tags] <%s> failed to save json: %s", checksum, err.Error())
+		if err := t.json.saveTag(fn, newTagJSON); err != nil {
+			logger.Errorf("[tags] <%s> failed to save json: %s", fn, err.Error())
 		}
 	}
 }

@@ -957,7 +1056,6 @@ func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo Reposit
 		index := i + 1
 		logger.Progressf("[movies] %d of %d", index, len(movies))
 
-		t.Mappings.Movies = append(t.Mappings.Movies, jsonschema.PathNameMapping{Name: movie.Name.String, Checksum: movie.Checksum})
 		jobCh <- movie // feed workers
 	}
 

@@ -987,15 +1085,10 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
 			}
 		}
 
-		movieJSON, err := t.json.getMovie(m.Checksum)
-		if err != nil {
-			logger.Debugf("[movies] error reading movie json: %s", err.Error())
-		} else if jsonschema.CompareJSON(*movieJSON, *newMovieJSON) {
-			continue
-		}
+		fn := newMovieJSON.Filename()
 
-		if err := t.json.saveMovie(m.Checksum, newMovieJSON); err != nil {
-			logger.Errorf("[movies] <%s> failed to save json: %s", m.Checksum, err.Error())
+		if err := t.json.saveMovie(fn, newMovieJSON); err != nil {
+			logger.Errorf("[movies] <%s> failed to save json: %s", fn, err.Error())
 		}
 	}
 }
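
Note how exportScene, exportImage and exportGallery above now derive each JSON filename from Filename(basename, hash) instead of the old checksum path, and the gallery branch passes strconv.Itoa(g.ID) as the hash so that two galleries sharing a basename cannot collide in the export directory. A hedged sketch of how such a name could be assembled — exportFilename below is a hypothetical helper for illustration, not the actual jsonschema Filename implementation:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// invalidChars matches runs of characters unsafe in a cross-platform filename.
var invalidChars = regexp.MustCompile(`[^a-zA-Z0-9_.-]+`)

// exportFilename joins the non-empty parts with dots and sanitizes each one.
// The hash part is the disambiguator: a content hash for scenes/images, the
// database ID for galleries.
func exportFilename(title, basename, hash string) string {
	var parts []string
	for _, p := range []string{title, basename, hash} {
		if p != "" {
			parts = append(parts, invalidChars.ReplaceAllString(p, "_"))
		}
	}
	return strings.Join(parts, ".") + ".json"
}

func main() {
	// two galleries with the same zip basename stay distinct thanks to the ID
	fmt.Println(exportFilename("Holiday", "photos.zip", "12")) // Holiday.photos.zip.12.json
	fmt.Println(exportFilename("Holiday", "photos.zip", "37")) // Holiday.photos.zip.37.json
}
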
@@ -7,6 +7,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"io/ioutil"
 	"os"
 	"path/filepath"
 	"time"

@@ -37,7 +38,6 @@ type ImportTask struct {
 	DuplicateBehaviour  ImportDuplicateEnum
 	MissingRefBehaviour models.ImportMissingRefEnum
 
-	mappings            *jsonschema.Mappings
 	scraped             []jsonschema.ScrapedItem
 	fileNamingAlgorithm models.HashAlgorithm
 }

@@ -112,11 +112,6 @@ func (t *ImportTask) Start(ctx context.Context) {
 		t.MissingRefBehaviour = models.ImportMissingRefEnumFail
 	}
 
-	t.mappings, _ = t.json.getMappings()
-	if t.mappings == nil {
-		logger.Error("missing mappings json")
-		return
-	}
 	scraped, _ := t.json.getScraped()
 	if scraped == nil {
 		logger.Warn("missing scraped json")

@@ -136,6 +131,7 @@ func (t *ImportTask) Start(ctx context.Context) {
 	t.ImportPerformers(ctx)
 	t.ImportStudios(ctx)
 	t.ImportMovies(ctx)
+	t.ImportFiles(ctx)
 	t.ImportGalleries(ctx)
 
 	t.ImportScrapedItems(ctx)

@@ -199,15 +195,25 @@ func (t *ImportTask) unzipFile() error {
 func (t *ImportTask) ImportPerformers(ctx context.Context) {
 	logger.Info("[performers] importing")
 
-	for i, mappingJSON := range t.mappings.Performers {
+	path := t.json.json.Performers
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[performers] failed to read performers directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		performerJSON, err := t.json.getPerformer(mappingJSON.Checksum)
+		performerJSON, err := jsonschema.LoadPerformerFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[performers] failed to read json: %s", err.Error())
 			continue
 		}
 
-		logger.Progressf("[performers] %d of %d", index, len(t.mappings.Performers))
+		logger.Progressf("[performers] %d of %d", index, len(files))
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager

@@ -220,7 +226,7 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
 
 			return performImport(ctx, importer, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[performers] <%s> import failed: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[performers] <%s> import failed: %s", fi.Name(), err.Error())
 		}
 	}
 

@@ -232,15 +238,25 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
 
 	logger.Info("[studios] importing")
 
-	for i, mappingJSON := range t.mappings.Studios {
+	path := t.json.json.Studios
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[studios] failed to read studios directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		studioJSON, err := t.json.getStudio(mappingJSON.Checksum)
+		studioJSON, err := jsonschema.LoadStudioFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[studios] failed to read json: %s", err.Error())
 			continue
 		}
 
-		logger.Progressf("[studios] %d of %d", index, len(t.mappings.Studios))
+		logger.Progressf("[studios] %d of %d", index, len(files))
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			return t.ImportStudio(ctx, studioJSON, pendingParent, t.txnManager.Studio)

@@ -253,7 +269,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
 				continue
 			}
 
-			logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[studios] <%s> failed to create: %s", fi.Name(), err.Error())
 			continue
 		}
 	}

@@ -311,15 +327,25 @@ func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.St
 func (t *ImportTask) ImportMovies(ctx context.Context) {
 	logger.Info("[movies] importing")
 
-	for i, mappingJSON := range t.mappings.Movies {
+	path := t.json.json.Movies
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[movies] failed to read movies directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		movieJSON, err := t.json.getMovie(mappingJSON.Checksum)
+		movieJSON, err := jsonschema.LoadMovieFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[movies] failed to read json: %s", err.Error())
 			continue
 		}
 
-		logger.Progressf("[movies] %d of %d", index, len(t.mappings.Movies))
+		logger.Progressf("[movies] %d of %d", index, len(files))
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager

@@ -335,7 +361,7 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
 
 			return performImport(ctx, movieImporter, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[movies] <%s> import failed: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[movies] <%s> import failed: %s", fi.Name(), err.Error())
 			continue
 		}
 	}

@@ -343,18 +369,118 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
 	logger.Info("[movies] import complete")
 }
 
+func (t *ImportTask) ImportFiles(ctx context.Context) {
+	logger.Info("[files] importing")
+
+	path := t.json.json.Files
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[files] failed to read files directory: %v", err)
+		}
+
+		return
+	}
+
+	pendingParent := make(map[string][]jsonschema.DirEntry)
+
+	for i, fi := range files {
+		index := i + 1
+		fileJSON, err := jsonschema.LoadFileFile(filepath.Join(path, fi.Name()))
+		if err != nil {
+			logger.Errorf("[files] failed to read json: %s", err.Error())
+			continue
+		}
+
+		logger.Progressf("[files] %d of %d", index, len(files))
+
+		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
+			return t.ImportFile(ctx, fileJSON, pendingParent)
+		}); err != nil {
+			if errors.Is(err, errZipFileNotExist) {
+				// add to the pending parent list so that it is created after the parent
+				s := pendingParent[fileJSON.DirEntry().ZipFile]
+				s = append(s, fileJSON)
+				pendingParent[fileJSON.DirEntry().ZipFile] = s
+				continue
+			}
+
+			logger.Errorf("[files] <%s> failed to create: %s", fi.Name(), err.Error())
+			continue
+		}
+	}
+
+	// create the leftover studios, warning for missing parents
+	if len(pendingParent) > 0 {
+		logger.Warnf("[files] importing files with missing zip files")
+
+		for _, s := range pendingParent {
+			for _, orphanFileJSON := range s {
+				if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
+					return t.ImportFile(ctx, orphanFileJSON, nil)
+				}); err != nil {
+					logger.Errorf("[files] <%s> failed to create: %s", orphanFileJSON.DirEntry().Path, err.Error())
+					continue
+				}
+			}
+		}
+	}
+
+	logger.Info("[files] import complete")
+}
+
+func (t *ImportTask) ImportFile(ctx context.Context, fileJSON jsonschema.DirEntry, pendingParent map[string][]jsonschema.DirEntry) error {
+	r := t.txnManager
+	readerWriter := r.File
+
+	fileImporter := &fileFolderImporter{
+		ReaderWriter: readerWriter,
+		FolderStore:  r.Folder,
+		Input:        fileJSON,
+	}
+
+	// ignore duplicate files - don't overwrite
+	if err := performImport(ctx, fileImporter, ImportDuplicateEnumIgnore); err != nil {
+		return err
+	}
+
+	// now create the files pending this file's creation
+	s := pendingParent[fileJSON.DirEntry().Path]
+	for _, childFileJSON := range s {
+		// map is nil since we're not checking parent studios at this point
+		if err := t.ImportFile(ctx, childFileJSON, nil); err != nil {
+			return fmt.Errorf("failed to create child file <%s>: %s", childFileJSON.DirEntry().Path, err.Error())
+		}
+	}
+
+	// delete the entry from the map so that we know its not left over
+	delete(pendingParent, fileJSON.DirEntry().Path)
+
+	return nil
+}
+
 func (t *ImportTask) ImportGalleries(ctx context.Context) {
 	logger.Info("[galleries] importing")
 
-	for i, mappingJSON := range t.mappings.Galleries {
+	path := t.json.json.Galleries
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[galleries] failed to read galleries directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		galleryJSON, err := t.json.getGallery(mappingJSON.Checksum)
+		galleryJSON, err := jsonschema.LoadGalleryFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[galleries] failed to read json: %s", err.Error())
 			continue
 		}
 
-		logger.Progressf("[galleries] %d of %d", index, len(t.mappings.Galleries))
+		logger.Progressf("[galleries] %d of %d", index, len(files))
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager

@@ -365,6 +491,8 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) {
 
 			galleryImporter := &gallery.Importer{
 				ReaderWriter:        readerWriter,
+				FolderFinder:        r.Folder,
+				FileFinder:          r.File,
 				PerformerWriter:     performerWriter,
 				StudioWriter:        studioWriter,
 				TagWriter:           tagWriter,

@@ -374,7 +502,7 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) {
 
 			return performImport(ctx, galleryImporter, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[galleries] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[galleries] <%s> import failed to commit: %s", fi.Name(), err.Error())
 			continue
 		}
 	}

@@ -386,15 +514,25 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
 	pendingParent := make(map[string][]*jsonschema.Tag)
 	logger.Info("[tags] importing")
 
-	for i, mappingJSON := range t.mappings.Tags {
+	path := t.json.json.Tags
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[tags] failed to read tags directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		tagJSON, err := t.json.getTag(mappingJSON.Checksum)
+		tagJSON, err := jsonschema.LoadTagFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[tags] failed to read json: %s", err.Error())
 			continue
 		}
 
-		logger.Progressf("[tags] %d of %d", index, len(t.mappings.Tags))
+		logger.Progressf("[tags] %d of %d", index, len(files))
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			return t.ImportTag(ctx, tagJSON, pendingParent, false, t.txnManager.Tag)

@@ -405,7 +543,7 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
 				continue
 			}
 
-			logger.Errorf("[tags] <%s> failed to import: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[tags] <%s> failed to import: %s", fi.Name(), err.Error())
 			continue
 		}
 	}

@@ -467,7 +605,7 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
 
 	for i, mappingJSON := range t.scraped {
 		index := i + 1
-		logger.Progressf("[scraped sites] %d of %d", index, len(t.mappings.Scenes))
+		logger.Progressf("[scraped sites] %d of %d", index, len(t.scraped))
 
 		newScrapedItem := models.ScrapedItem{
 			Title: sql.NullString{String: mappingJSON.Title, Valid: true},

@@ -511,18 +649,26 @@
 func (t *ImportTask) ImportScenes(ctx context.Context) {
 	logger.Info("[scenes] importing")
 
-	for i, mappingJSON := range t.mappings.Scenes {
-		index := i + 1
-
-		logger.Progressf("[scenes] %d of %d", index, len(t.mappings.Scenes))
-
-		sceneJSON, err := t.json.getScene(mappingJSON.Checksum)
-		if err != nil {
-			logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
-			continue
+	path := t.json.json.Scenes
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[scenes] failed to read scenes directory: %v", err)
 		}
 
-		sceneHash := mappingJSON.Checksum
+		return
+	}
+
+	for i, fi := range files {
+		index := i + 1
+
+		logger.Progressf("[scenes] %d of %d", index, len(files))
+
+		sceneJSON, err := jsonschema.LoadSceneFile(filepath.Join(path, fi.Name()))
+		if err != nil {
+			logger.Infof("[scenes] <%s> json parse failure: %s", fi.Name(), err.Error())
+			continue
+		}
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager

@@ -537,12 +683,12 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
 			sceneImporter := &scene.Importer{
 				ReaderWriter: readerWriter,
 				Input:        *sceneJSON,
-				Path:         mappingJSON.Path,
+				FileFinder:   r.File,
 
 				FileNamingAlgorithm: t.fileNamingAlgorithm,
 				MissingRefBehaviour: t.MissingRefBehaviour,
 
-				GalleryWriter:   galleryWriter,
+				GalleryFinder:   galleryWriter,
 				MovieWriter:     movieWriter,
 				PerformerWriter: performerWriter,
 				StudioWriter:    studioWriter,

@@ -570,7 +716,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
 
 			return nil
 		}); err != nil {
-			logger.Errorf("[scenes] <%s> import failed: %s", sceneHash, err.Error())
+			logger.Errorf("[scenes] <%s> import failed: %s", fi.Name(), err.Error())
 		}
 	}
 

@@ -580,18 +726,26 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
 func (t *ImportTask) ImportImages(ctx context.Context) {
 	logger.Info("[images] importing")
 
-	for i, mappingJSON := range t.mappings.Images {
-		index := i + 1
-
-		logger.Progressf("[images] %d of %d", index, len(t.mappings.Images))
-
-		imageJSON, err := t.json.getImage(mappingJSON.Checksum)
-		if err != nil {
-			logger.Infof("[images] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
-			continue
+	path := t.json.json.Images
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[images] failed to read images directory: %v", err)
 		}
 
-		imageHash := mappingJSON.Checksum
+		return
+	}
+
+	for i, fi := range files {
+		index := i + 1
+
+		logger.Progressf("[images] %d of %d", index, len(files))
+
+		imageJSON, err := jsonschema.LoadImageFile(filepath.Join(path, fi.Name()))
+		if err != nil {
+			logger.Infof("[images] <%s> json parse failure: %s", fi.Name(), err.Error())
+			continue
+		}
 
 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager

@@ -603,12 +757,12 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
 
 			imageImporter := &image.Importer{
 				ReaderWriter: readerWriter,
+				FileFinder:   r.File,
 				Input:        *imageJSON,
-				Path:         mappingJSON.Path,
 
 				MissingRefBehaviour: t.MissingRefBehaviour,
 
-				GalleryWriter:   galleryWriter,
+				GalleryFinder:   galleryWriter,
 				PerformerWriter: performerWriter,
 				StudioWriter:    studioWriter,
 				TagWriter:       tagWriter,

@@ -616,7 +770,7 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
 
 			return performImport(ctx, imageImporter, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[images] <%s> import failed: %s", imageHash, err.Error())
+			logger.Errorf("[images] <%s> import failed: %s", fi.Name(), err.Error())
 		}
 	}
 
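
ImportFiles above gets dependency ordering without sorting the directory listing: a file whose containing zip has not been created yet fails with errZipFileNotExist, is parked in pendingParent keyed by the zip path, and is replayed by ImportFile as soon as the zip itself is imported. A toy, self-contained sketch of that deferred-creation pattern (the types and paths below are invented for illustration, not the repository's):

package main

import "fmt"

type entry struct{ path, zip string }

func main() {
	entries := []entry{
		{path: "a.zip/img1.png", zip: "a.zip"}, // child arrives before its parent zip
		{path: "a.zip", zip: ""},
		{path: "b.png", zip: ""},
	}

	created := map[string]bool{}
	pending := map[string][]entry{} // zip path -> children waiting on it

	var create func(e entry)
	create = func(e entry) {
		if e.zip != "" && !created[e.zip] {
			pending[e.zip] = append(pending[e.zip], e) // defer until the zip exists
			return
		}
		created[e.path] = true
		fmt.Println("created", e.path)
		// replay children now that their parent exists
		for _, child := range pending[e.path] {
			create(child)
		}
		delete(pending, e.path)
	}

	for _, e := range entries {
		create(e)
	}
}
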
@@ -13,14 +13,17 @@ import (
 // does not convert the relationships to other objects.
 func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
 	newGalleryJSON := jsonschema.Gallery{
-		Title:     gallery.Title,
-		URL:       gallery.URL,
-		Details:   gallery.Details,
-		CreatedAt: json.JSONTime{Time: gallery.CreatedAt},
-		UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
+		FolderPath: gallery.FolderPath,
+		Title:      gallery.Title,
+		URL:        gallery.URL,
+		Details:    gallery.Details,
+		CreatedAt:  json.JSONTime{Time: gallery.CreatedAt},
+		UpdatedAt:  json.JSONTime{Time: gallery.UpdatedAt},
 	}
 
-	newGalleryJSON.Path = gallery.Path()
+	for _, f := range gallery.Files {
+		newGalleryJSON.ZipFiles = append(newGalleryJSON.ZipFiles, f.Base().Path)
+	}
 
 	if gallery.Date != nil {
 		newGalleryJSON.Date = gallery.Date.String()

@@ -61,12 +64,22 @@ func GetIDs(galleries []*models.Gallery) []int {
 	return results
 }
 
-func GetChecksums(galleries []*models.Gallery) []string {
-	var results []string
+func GetRefs(galleries []*models.Gallery) []jsonschema.GalleryRef {
+	var results []jsonschema.GalleryRef
 	for _, gallery := range galleries {
-		if gallery.Checksum() != "" {
-			results = append(results, gallery.Checksum())
+		toAdd := jsonschema.GalleryRef{}
+		switch {
+		case gallery.FolderPath != "":
+			toAdd.FolderPath = gallery.FolderPath
+		case len(gallery.Files) > 0:
+			for _, f := range gallery.Files {
+				toAdd.ZipFiles = append(toAdd.ZipFiles, f.Base().Path)
+			}
+		default:
+			toAdd.Title = gallery.Title
 		}
+
+		results = append(results, toAdd)
 	}
 
 	return results
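
GetRefs above replaces the plain checksum strings with jsonschema.GalleryRef values that identify a gallery by folder path, zip file list, or title, in that order of preference. A sketch of the three resulting JSON shapes — the struct tags below are assumptions for illustration, not the schema's actual field names:

package main

import (
	"encoding/json"
	"fmt"
)

// galleryRef mirrors the three mutually exclusive ways a GalleryRef can
// identify a gallery; exactly one variant is populated per ref.
type galleryRef struct {
	FolderPath string   `json:"folder_path,omitempty"`
	ZipFiles   []string `json:"zip_files,omitempty"`
	Title      string   `json:"title,omitempty"`
}

func main() {
	refs := []galleryRef{
		{FolderPath: "/library/galleries/holiday"},         // folder-based gallery
		{ZipFiles: []string{"/library/archives/set1.zip"}}, // zip-based gallery
		{Title: "manual gallery"},                          // neither folder nor files
	}
	out, _ := json.MarshalIndent(refs, "", "  ")
	fmt.Println(string(out))
}
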
@@ -1,171 +1,162 @@
 package gallery
 
-// import (
-// 	"errors"
+import (
+	"errors"
 
-// 	"github.com/stashapp/stash/pkg/models"
-// 	"github.com/stashapp/stash/pkg/models/json"
-// 	"github.com/stashapp/stash/pkg/models/jsonschema"
-// 	"github.com/stashapp/stash/pkg/models/mocks"
-// 	"github.com/stretchr/testify/assert"
+	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/models/json"
+	"github.com/stashapp/stash/pkg/models/jsonschema"
+	"github.com/stashapp/stash/pkg/models/mocks"
+	"github.com/stretchr/testify/assert"
 
-// 	"testing"
-// 	"time"
-// )
+	"testing"
+	"time"
+)
 
-// const (
-// 	galleryID = 1
+const (
+	galleryID = 1
 
-// 	studioID        = 4
-// 	missingStudioID = 5
-// 	errStudioID     = 6
+	studioID        = 4
+	missingStudioID = 5
+	errStudioID     = 6
 
-// 	// noTagsID = 11
-// )
+	// noTagsID = 11
+)
 
-// var (
-// 	path      = "path"
-// 	isZip     = true
-// 	url       = "url"
-// 	checksum  = "checksum"
-// 	title     = "title"
-// 	date      = "2001-01-01"
-// 	dateObj   = models.NewDate(date)
-// 	rating    = 5
-// 	organized = true
-// 	details   = "details"
-// )
+var (
+	url       = "url"
+	title     = "title"
+	date      = "2001-01-01"
+	dateObj   = models.NewDate(date)
+	rating    = 5
+	organized = true
+	details   = "details"
+)
 
-// const (
-// 	studioName = "studioName"
-// )
+const (
+	studioName = "studioName"
+)
 
-// var (
-// 	createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
-// 	updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
-// )
+var (
+	createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
+	updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
+)
 
-// func createFullGallery(id int) models.Gallery {
-// 	return models.Gallery{
-// 		ID:        id,
-// 		Path:      &path,
-// 		Zip:       isZip,
-// 		Title:     title,
-// 		Checksum:  checksum,
-// 		Date:      &dateObj,
-// 		Details:   details,
-// 		Rating:    &rating,
-// 		Organized: organized,
-// 		URL:       url,
-// 		CreatedAt: createTime,
-// 		UpdatedAt: updateTime,
-// 	}
-// }
+func createFullGallery(id int) models.Gallery {
+	return models.Gallery{
+		ID:        id,
+		Title:     title,
+		Date:      &dateObj,
+		Details:   details,
+		Rating:    &rating,
+		Organized: organized,
+		URL:       url,
+		CreatedAt: createTime,
+		UpdatedAt: updateTime,
+	}
+}
 
-// func createFullJSONGallery() *jsonschema.Gallery {
-// 	return &jsonschema.Gallery{
-// 		Title:     title,
-// 		Path:      path,
-// 		Zip:       isZip,
-// 		Checksum:  checksum,
-// 		Date:      date,
-// 		Details:   details,
-// 		Rating:    rating,
-// 		Organized: organized,
-// 		URL:       url,
-// 		CreatedAt: json.JSONTime{
-// 			Time: createTime,
-// 		},
-// 		UpdatedAt: json.JSONTime{
-// 			Time: updateTime,
-// 		},
-// 	}
-// }
+func createFullJSONGallery() *jsonschema.Gallery {
+	return &jsonschema.Gallery{
+		Title:     title,
+		Date:      date,
+		Details:   details,
+		Rating:    rating,
+		Organized: organized,
+		URL:       url,
+		CreatedAt: json.JSONTime{
+			Time: createTime,
+		},
+		UpdatedAt: json.JSONTime{
+			Time: updateTime,
+		},
+	}
+}
 
-// type basicTestScenario struct {
-// 	input    models.Gallery
-// 	expected *jsonschema.Gallery
-// 	err      bool
-// }
+type basicTestScenario struct {
+	input    models.Gallery
+	expected *jsonschema.Gallery
+	err      bool
+}
 
-// var scenarios = []basicTestScenario{
-// 	{
-// 		createFullGallery(galleryID),
-// 		createFullJSONGallery(),
-// 		false,
-// 	},
-// }
+var scenarios = []basicTestScenario{
+	{
+		createFullGallery(galleryID),
+		createFullJSONGallery(),
+		false,
+	},
+}
 
-// func TestToJSON(t *testing.T) {
-// 	for i, s := range scenarios {
-// 		gallery := s.input
-// 		json, err := ToBasicJSON(&gallery)
+func TestToJSON(t *testing.T) {
+	for i, s := range scenarios {
+		gallery := s.input
+		json, err := ToBasicJSON(&gallery)
 
-// 		switch {
-// 		case !s.err && err != nil:
-// 			t.Errorf("[%d] unexpected error: %s", i, err.Error())
-// 		case s.err && err == nil:
-// 			t.Errorf("[%d] expected error not returned", i)
-// 		default:
-// 			assert.Equal(t, s.expected, json, "[%d]", i)
-// 		}
-// 	}
-// }
+		switch {
+		case !s.err && err != nil:
+			t.Errorf("[%d] unexpected error: %s", i, err.Error())
+		case s.err && err == nil:
+			t.Errorf("[%d] expected error not returned", i)
+		default:
+			assert.Equal(t, s.expected, json, "[%d]", i)
+		}
+	}
+}
 
-// func createStudioGallery(studioID int) models.Gallery {
-// 	return models.Gallery{
-// 		StudioID: &studioID,
-// 	}
-// }
+func createStudioGallery(studioID int) models.Gallery {
	return models.Gallery{
		StudioID: &studioID,
	}
+}
 
-// type stringTestScenario struct {
-// 	input    models.Gallery
-// 	expected string
-// 	err      bool
-// }
+type stringTestScenario struct {
+	input    models.Gallery
+	expected string
+	err      bool
+}
 
-// var getStudioScenarios = []stringTestScenario{
-// 	{
-// 		createStudioGallery(studioID),
-// 		studioName,
-// 		false,
-// 	},
-// 	{
-// 		createStudioGallery(missingStudioID),
-// 		"",
-// 		false,
-// 	},
-// 	{
-// 		createStudioGallery(errStudioID),
-// 		"",
-// 		true,
-// 	},
-// }
+var getStudioScenarios = []stringTestScenario{
+	{
+		createStudioGallery(studioID),
+		studioName,
+		false,
+	},
+	{
+		createStudioGallery(missingStudioID),
+		"",
+		false,
+	},
+	{
+		createStudioGallery(errStudioID),
+		"",
+		true,
+	},
+}
 
-// func TestGetStudioName(t *testing.T) {
-// 	mockStudioReader := &mocks.StudioReaderWriter{}
+func TestGetStudioName(t *testing.T) {
+	mockStudioReader := &mocks.StudioReaderWriter{}
 
-// 	studioErr := errors.New("error getting image")
+	studioErr := errors.New("error getting image")
 
-// 	mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
-// 		Name: models.NullString(studioName),
-// 	}, nil).Once()
-// 	mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
-// 	mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
+	mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
+		Name: models.NullString(studioName),
+	}, nil).Once()
+	mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
+	mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
 
-// 	for i, s := range getStudioScenarios {
-// 		gallery := s.input
-// 		json, err := GetStudioName(testCtx, mockStudioReader, &gallery)
+	for i, s := range getStudioScenarios {
+		gallery := s.input
+		json, err := GetStudioName(testCtx, mockStudioReader, &gallery)
 
-// 		switch {
-// 		case !s.err && err != nil:
-// 			t.Errorf("[%d] unexpected error: %s", i, err.Error())
-// 		case s.err && err == nil:
-// 			t.Errorf("[%d] expected error not returned", i)
-// 		default:
-// 			assert.Equal(t, s.expected, json, "[%d]", i)
-// 		}
-// 	}
+		switch {
+		case !s.err && err != nil:
+			t.Errorf("[%d] unexpected error: %s", i, err.Error())
+		case s.err && err == nil:
+			t.Errorf("[%d] expected error not returned", i)
+		default:
+			assert.Equal(t, s.expected, json, "[%d]", i)
+		}
+	}
 
-// 	mockStudioReader.AssertExpectations(t)
-// }
+	mockStudioReader.AssertExpectations(t)
+}
@@ -3,8 +3,10 @@ package gallery
import (
	"context"
	"fmt"
	"path/filepath"
	"strings"

	"github.com/stashapp/stash/pkg/file"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/performer"

@@ -18,6 +20,8 @@ type Importer struct {
	StudioWriter        studio.NameFinderCreator
	PerformerWriter     performer.NameFinderCreator
	TagWriter           tag.NameFinderCreator
	FileFinder          file.Getter
	FolderFinder        file.FolderGetter
	Input               jsonschema.Gallery
	MissingRefBehaviour models.ImportMissingRefEnum

@@ -32,6 +36,10 @@ type FullCreatorUpdater interface {
func (i *Importer) PreImport(ctx context.Context) error {
	i.gallery = i.galleryJSONToGallery(i.Input)

	if err := i.populateFilesFolder(ctx); err != nil {
		return err
	}

	if err := i.populateStudio(ctx); err != nil {
		return err
	}

@@ -238,31 +246,97 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta
	return ret, nil
}

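// populateFilesFolder resolves the gallery's zip file paths and folder path
// against the stored files and folders; a reference that cannot be found
// fails the import.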
func (i *Importer) populateFilesFolder(ctx context.Context) error {
	for _, ref := range i.Input.ZipFiles {
		path := filepath.FromSlash(ref)
		f, err := i.FileFinder.FindByPath(ctx, path)
		if err != nil {
			return fmt.Errorf("error finding file: %w", err)
		}

		if f == nil {
			return fmt.Errorf("gallery zip file '%s' not found", path)
		} else {
			i.gallery.Files = append(i.gallery.Files, f)
		}
	}

	if i.Input.FolderPath != "" {
		path := filepath.FromSlash(i.Input.FolderPath)
		f, err := i.FolderFinder.FindByPath(ctx, path)
		if err != nil {
			return fmt.Errorf("error finding folder: %w", err)
		}

		if f == nil {
			return fmt.Errorf("gallery folder '%s' not found", path)
		} else {
			i.gallery.FolderID = &f.ID
		}
	}

	return nil
}

func (i *Importer) PostImport(ctx context.Context, id int) error {
	return nil
}

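// Name returns a display name for the gallery being imported: the title if
// set, then the folder path, then the first zip file path.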
func (i *Importer) Name() string {
	return i.Input.Path
	if i.Input.Title != "" {
		return i.Input.Title
	}

	if i.Input.FolderPath != "" {
		return i.Input.FolderPath
	}

	if len(i.Input.ZipFiles) > 0 {
		return i.Input.ZipFiles[0]
	}

	return ""
}

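// FindExistingID returns the ID of a gallery that already matches the
// imported one: by file ID if it has files, by folder ID if it is
// folder-based, otherwise by its user-gallery title.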
func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
	// TODO
	// existing, err := i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
	// if err != nil {
	// 	return nil, err
	// }
	var existing []*models.Gallery
	var err error
	switch {
	case len(i.gallery.Files) > 0:
		for _, f := range i.gallery.Files {
			// plain assignment so the match survives the loop for the
			// len(existing) check below; := would shadow the outer variable
			existing, err = i.ReaderWriter.FindByFileID(ctx, f.Base().ID)
			if err != nil {
				return nil, err
			}

			// if existing != nil {
			// 	id := existing.ID
			// 	return &id, nil
			// }
			if len(existing) > 0 {
				break
			}
		}
	case i.gallery.FolderID != nil:
		existing, err = i.ReaderWriter.FindByFolderID(ctx, *i.gallery.FolderID)
	default:
		existing, err = i.ReaderWriter.FindUserGalleryByTitle(ctx, i.gallery.Title)
	}

	if err != nil {
		return nil, err
	}

	if len(existing) > 0 {
		id := existing[0].ID
		return &id, nil
	}

	return nil, nil
}

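// Create persists the gallery together with the IDs of its zip files.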
func (i *Importer) Create(ctx context.Context) (*int, error) {
	err := i.ReaderWriter.Create(ctx, &i.gallery, nil)
	var fileIDs []file.ID
	for _, f := range i.gallery.Files {
		fileIDs = append(fileIDs, f.Base().ID)
	}
	err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs)
	if err != nil {
		return nil, fmt.Errorf("error creating gallery: %v", err)
	}

@@ -1,441 +1,322 @@
package gallery

// import (
// "context"
// "errors"
// "testing"
// "time"

// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/models/json"
// "github.com/stashapp/stash/pkg/models/jsonschema"
// "github.com/stashapp/stash/pkg/models/mocks"
// "github.com/stretchr/testify/assert"
// "github.com/stretchr/testify/mock"
// )

// var (
// galleryNameErr = "galleryNameErr"
// // existingGalleryName = "existingGalleryName"

// existingGalleryID = 100
// existingStudioID = 101
// existingPerformerID = 103
// existingTagID = 105

// existingStudioName = "existingStudioName"
// existingStudioErr = "existingStudioErr"
// missingStudioName = "missingStudioName"

// existingPerformerName = "existingPerformerName"
// existingPerformerErr = "existingPerformerErr"
// missingPerformerName = "missingPerformerName"

// existingTagName = "existingTagName"
// existingTagErr = "existingTagErr"
// missingTagName = "missingTagName"

// missingChecksum = "missingChecksum"
// errChecksum = "errChecksum"
// )

// var testCtx = context.Background()

// var (
// createdAt = time.Date(2001, time.January, 2, 1, 2, 3, 4, time.Local)
// updatedAt = time.Date(2002, time.January, 2, 1, 2, 3, 4, time.Local)
// )

// func TestImporterName(t *testing.T) {
// i := Importer{
// Input: jsonschema.Gallery{
// Path: path,
// },
// }

// assert.Equal(t, path, i.Name())
// }

// func TestImporterPreImport(t *testing.T) {
// i := Importer{
// Input: jsonschema.Gallery{
// Path: path,
// Checksum: checksum,
// Title: title,
// Date: date,
// Details: details,
// Rating: rating,
// Organized: organized,
// URL: url,
// CreatedAt: json.JSONTime{
// Time: createdAt,
// },
// UpdatedAt: json.JSONTime{
// Time: updatedAt,
// },
// },
// }

// err := i.PreImport(testCtx)
// assert.Nil(t, err)

// expectedGallery := models.Gallery{
// Path: &path,
// Checksum: checksum,
// Title: title,
// Date: &dateObj,
// Details: details,
// Rating: &rating,
// Organized: organized,
// URL: url,
// CreatedAt: createdAt,
// UpdatedAt: updatedAt,
// }

// assert.Equal(t, expectedGallery, i.gallery)
// }

// func TestImporterPreImportWithStudio(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Gallery{
// Studio: existingStudioName,
// Path: path,
// },
// }

// studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
// ID: existingStudioID,
// }, nil).Once()
// studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingStudioID, *i.gallery.StudioID)

// i.Input.Studio = existingStudioErr
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// studioReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingStudio(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Studio: missingStudioName,
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
// ID: existingStudioID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingStudioID, *i.gallery.StudioID)

// studioReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Studio: missingStudioName,
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterPreImportWithPerformer(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Gallery{
// Path: path,
// Performers: []string{
// existingPerformerName,
// },
// },
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
// {
// ID: existingPerformerID,
// Name: models.NullString(existingPerformerName),
// },
// }, nil).Once()
// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs)

// i.Input.Performers = []string{existingPerformerErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// performerReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingPerformer(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Performers: []string{
// missingPerformerName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
// ID: existingPerformerID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs)

// performerReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Performers: []string{
// missingPerformerName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterPreImportWithTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// existingTagName,
// },
// },
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
// {
// ID: existingTagID,
// Name: existingTagName,
// },
// }, nil).Once()
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs)

// i.Input.Tags = []string{existingTagErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// tagReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
// ID: existingTagID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs)

// tagReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterFindExistingID(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}

// i := Importer{
// ReaderWriter: readerWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Checksum: missingChecksum,
// },
// }

// expectedErr := errors.New("FindBy* error")
// readerWriter.On("FindByChecksum", testCtx, missingChecksum).Return(nil, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, checksum).Return(&models.Gallery{
// ID: existingGalleryID,
// }, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, errChecksum).Return(nil, expectedErr).Once()

// id, err := i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.Nil(t, err)

// i.Input.Checksum = checksum
// id, err = i.FindExistingID(testCtx)
// assert.Equal(t, existingGalleryID, *id)
// assert.Nil(t, err)

// i.Input.Checksum = errChecksum
// id, err = i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)

// readerWriter.AssertExpectations(t)
// }

// func TestCreate(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}

// gallery := models.Gallery{
// Title: title,
// }

// galleryErr := models.Gallery{
// Title: galleryNameErr,
// }

// i := Importer{
// ReaderWriter: readerWriter,
// gallery: gallery,
// }

// errCreate := errors.New("Create error")
// readerWriter.On("Create", testCtx, &gallery).Run(func(args mock.Arguments) {
// args.Get(1).(*models.Gallery).ID = galleryID
// }).Return(nil).Once()
// readerWriter.On("Create", testCtx, &galleryErr).Return(errCreate).Once()

// id, err := i.Create(testCtx)
// assert.Equal(t, galleryID, *id)
// assert.Nil(t, err)

// i.gallery = galleryErr
// id, err = i.Create(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)

// readerWriter.AssertExpectations(t)
// }

// func TestUpdate(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}

// gallery := models.Gallery{
// Title: title,
// }

// i := Importer{
// ReaderWriter: readerWriter,
// gallery: gallery,
// }

// // id needs to be set for the mock input
// gallery.ID = galleryID
// readerWriter.On("Update", testCtx, &gallery).Return(nil, nil).Once()

// err := i.Update(testCtx, galleryID)
// assert.Nil(t, err)

// readerWriter.AssertExpectations(t)
// }
import (
	"context"
	"errors"
	"testing"
	"time"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/json"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

var (
	existingStudioID    = 101
	existingPerformerID = 103
	existingTagID       = 105

	existingStudioName = "existingStudioName"
	existingStudioErr  = "existingStudioErr"
	missingStudioName  = "missingStudioName"

	existingPerformerName = "existingPerformerName"
	existingPerformerErr  = "existingPerformerErr"
	missingPerformerName  = "missingPerformerName"

	existingTagName = "existingTagName"
	existingTagErr  = "existingTagErr"
	missingTagName  = "missingTagName"
)

var testCtx = context.Background()

var (
	createdAt = time.Date(2001, time.January, 2, 1, 2, 3, 4, time.Local)
	updatedAt = time.Date(2002, time.January, 2, 1, 2, 3, 4, time.Local)
)

func TestImporterPreImport(t *testing.T) {
	i := Importer{
		Input: jsonschema.Gallery{
			Title:     title,
			Date:      date,
			Details:   details,
			Rating:    rating,
			Organized: organized,
			URL:       url,
			CreatedAt: json.JSONTime{
				Time: createdAt,
			},
			UpdatedAt: json.JSONTime{
				Time: updatedAt,
			},
		},
	}

	err := i.PreImport(testCtx)
	assert.Nil(t, err)

	expectedGallery := models.Gallery{
		Title:        title,
		Date:         &dateObj,
		Details:      details,
		Rating:       &rating,
		Organized:    organized,
		URL:          url,
		TagIDs:       models.NewRelatedIDs([]int{}),
		PerformerIDs: models.NewRelatedIDs([]int{}),
		CreatedAt:    createdAt,
		UpdatedAt:    updatedAt,
	}

	assert.Equal(t, expectedGallery, i.gallery)
}

func TestImporterPreImportWithStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Gallery{
			Studio: existingStudioName,
		},
	}

	studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
		ID: existingStudioID,
	}, nil).Once()
	studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, existingStudioID, *i.gallery.StudioID)

	i.Input.Studio = existingStudioErr
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Gallery{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
	studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
		ID: existingStudioID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, existingStudioID, *i.gallery.StudioID)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Gallery{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
	studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

func TestImporterPreImportWithPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter:     performerReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Gallery{
			Performers: []string{
				existingPerformerName,
			},
		},
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
		{
			ID:   existingPerformerID,
			Name: models.NullString(existingPerformerName),
		},
	}, nil).Once()
	performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs.List())

	i.Input.Performers = []string{existingPerformerErr}
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Gallery{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
	performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
		ID: existingPerformerID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs.List())

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Gallery{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
	performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

func TestImporterPreImportWithTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter:           tagReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Gallery{
			Tags: []string{
				existingTagName,
			},
		},
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
		{
			ID:   existingTagID,
			Name: existingTagName,
		},
	}, nil).Once()
	tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs.List())

	i.Input.Tags = []string{existingTagErr}
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Input: jsonschema.Gallery{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
	tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
		ID: existingTagID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs.List())

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Input: jsonschema.Gallery{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
	tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

@@ -4,6 +4,7 @@ import (
	"context"
	"strconv"

	"github.com/stashapp/stash/pkg/file"
	"github.com/stashapp/stash/pkg/models"
)

@@ -15,8 +16,12 @@ type CountQueryer interface {
	QueryCount(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error)
}

type ChecksumsFinder interface {
	FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error)
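// Finder supersedes the checksum-based ChecksumsFinder: galleries are now
// located by path, user title, folder ID, file ID, or fingerprint.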
type Finder interface {
	FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
	FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error)
	FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error)
	FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
	FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error)
}

func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) {

@@ -16,8 +16,7 @@ import (
// const mutexType = "gallery"

type FinderCreatorUpdater interface {
	FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
	FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error)
	Finder
	Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error
	AddFileID(ctx context.Context, id int, fileID file.ID) error
}

@@ -14,7 +14,6 @@ import (
// of cover image.
func ToBasicJSON(image *models.Image) *jsonschema.Image {
	newImageJSON := jsonschema.Image{
		Checksum:  image.Checksum(),
		Title:     image.Title,
		CreatedAt: json.JSONTime{Time: image.CreatedAt},
		UpdatedAt: json.JSONTime{Time: image.UpdatedAt},

@@ -27,23 +26,25 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
	newImageJSON.Organized = image.Organized
	newImageJSON.OCounter = image.OCounter

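	// export the path of every file attached to the image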
	newImageJSON.File = getImageFileJSON(image)
	for _, f := range image.Files {
		newImageJSON.Files = append(newImageJSON.Files, f.Base().Path)
	}

	return &newImageJSON
}

func getImageFileJSON(image *models.Image) *jsonschema.ImageFile {
	ret := &jsonschema.ImageFile{}
// func getImageFileJSON(image *models.Image) *jsonschema.ImageFile {
// 	ret := &jsonschema.ImageFile{}

	f := image.PrimaryFile()
// 	f := image.PrimaryFile()

	ret.ModTime = json.JSONTime{Time: f.ModTime}
	ret.Size = f.Size
	ret.Width = f.Width
	ret.Height = f.Height
// 	ret.ModTime = json.JSONTime{Time: f.ModTime}
// 	ret.Size = f.Size
// 	ret.Width = f.Width
// 	ret.Height = f.Height

	return ret
}
// 	return ret
// }

// GetStudioName returns the name of the provided image's studio. It returns an
// empty string if there is no studio assigned to the image.

@@ -1,165 +1,144 @@
package image

// import (
// "errors"
import (
	"errors"

// "github.com/stashapp/stash/pkg/file"
// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/models/json"
// "github.com/stashapp/stash/pkg/models/jsonschema"
// "github.com/stashapp/stash/pkg/models/mocks"
// "github.com/stretchr/testify/assert"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/json"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"

// "testing"
// "time"
// )
	"testing"
	"time"
)

// const (
// imageID = 1
// errImageID = 3
const (
	imageID = 1

// studioID = 4
// missingStudioID = 5
// errStudioID = 6
// )
	studioID        = 4
	missingStudioID = 5
	errStudioID     = 6
)

// var (
// checksum = "checksum"
// title = "title"
// rating = 5
// organized = true
// ocounter = 2
// size int64 = 123
// width = 100
// height = 100
// )
var (
	title     = "title"
	rating    = 5
	organized = true
	ocounter  = 2
)

// const (
// studioName = "studioName"
// )
const (
	studioName = "studioName"
)

// var (
// createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
// updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
// )
var (
	createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
	updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
)

// func createFullImage(id int) models.Image {
// return models.Image{
// ID: id,
// Title: title,
// Files: []*file.ImageFile{
// {
// BaseFile: &file.BaseFile{
// Size: size,
// },
// Height: height,
// Width: width,
// },
// },
// OCounter: ocounter,
// Rating: &rating,
// Organized: organized,
// CreatedAt: createTime,
// UpdatedAt: updateTime,
// }
// }
func createFullImage(id int) models.Image {
	return models.Image{
		ID:        id,
		Title:     title,
		OCounter:  ocounter,
		Rating:    &rating,
		Organized: organized,
		CreatedAt: createTime,
		UpdatedAt: updateTime,
	}
}

// func createFullJSONImage() *jsonschema.Image {
// return &jsonschema.Image{
// Title: title,
// Checksum: checksum,
// OCounter: ocounter,
// Rating: rating,
// Organized: organized,
// File: &jsonschema.ImageFile{
// Height: height,
// Size: size,
// Width: width,
// },
// CreatedAt: json.JSONTime{
// Time: createTime,
// },
// UpdatedAt: json.JSONTime{
// Time: updateTime,
// },
// }
// }
func createFullJSONImage() *jsonschema.Image {
	return &jsonschema.Image{
		Title:     title,
		OCounter:  ocounter,
		Rating:    rating,
		Organized: organized,
		CreatedAt: json.JSONTime{
			Time: createTime,
		},
		UpdatedAt: json.JSONTime{
			Time: updateTime,
		},
	}
}

// type basicTestScenario struct {
// input models.Image
// expected *jsonschema.Image
// }
type basicTestScenario struct {
	input    models.Image
	expected *jsonschema.Image
}

// var scenarios = []basicTestScenario{
// {
// createFullImage(imageID),
// createFullJSONImage(),
// },
// }
var scenarios = []basicTestScenario{
	{
		createFullImage(imageID),
		createFullJSONImage(),
	},
}

// func TestToJSON(t *testing.T) {
// for i, s := range scenarios {
// image := s.input
// json := ToBasicJSON(&image)
func TestToJSON(t *testing.T) {
	for i, s := range scenarios {
		image := s.input
		json := ToBasicJSON(&image)

// assert.Equal(t, s.expected, json, "[%d]", i)
// }
// }
		assert.Equal(t, s.expected, json, "[%d]", i)
	}
}

// func createStudioImage(studioID int) models.Image {
// return models.Image{
// StudioID: &studioID,
// }
// }
func createStudioImage(studioID int) models.Image {
	return models.Image{
		StudioID: &studioID,
	}
}

// type stringTestScenario struct {
// input models.Image
// expected string
// err bool
// }
type stringTestScenario struct {
	input    models.Image
	expected string
	err      bool
}

// var getStudioScenarios = []stringTestScenario{
// {
// createStudioImage(studioID),
// studioName,
// false,
// },
// {
// createStudioImage(missingStudioID),
// "",
// false,
// },
// {
// createStudioImage(errStudioID),
// "",
// true,
// },
// }
var getStudioScenarios = []stringTestScenario{
	{
		createStudioImage(studioID),
		studioName,
		false,
	},
	{
		createStudioImage(missingStudioID),
		"",
		false,
	},
	{
		createStudioImage(errStudioID),
		"",
		true,
	},
}

// func TestGetStudioName(t *testing.T) {
// mockStudioReader := &mocks.StudioReaderWriter{}
func TestGetStudioName(t *testing.T) {
	mockStudioReader := &mocks.StudioReaderWriter{}

// studioErr := errors.New("error getting image")
	studioErr := errors.New("error getting image")

// mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
// Name: models.NullString(studioName),
// }, nil).Once()
// mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
// mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
	mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
		Name: models.NullString(studioName),
	}, nil).Once()
	mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
	mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()

// for i, s := range getStudioScenarios {
// image := s.input
// json, err := GetStudioName(testCtx, mockStudioReader, &image)
	for i, s := range getStudioScenarios {
		image := s.input
		json, err := GetStudioName(testCtx, mockStudioReader, &image)

// switch {
// case !s.err && err != nil:
// t.Errorf("[%d] unexpected error: %s", i, err.Error())
// case s.err && err == nil:
// t.Errorf("[%d] expected error not returned", i)
// default:
// assert.Equal(t, s.expected, json, "[%d]", i)
// }
// }
		switch {
		case !s.err && err != nil:
			t.Errorf("[%d] unexpected error: %s", i, err.Error())
		case s.err && err == nil:
			t.Errorf("[%d] expected error not returned", i)
		default:
			assert.Equal(t, s.expected, json, "[%d]", i)
		}
	}

// mockStudioReader.AssertExpectations(t)
// }
	mockStudioReader.AssertExpectations(t)
}

@@ -3,8 +3,10 @@ package image
import (
	"context"
	"fmt"
	"path/filepath"
	"strings"

	"github.com/stashapp/stash/pkg/file"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/performer"

@@ -13,8 +15,9 @@ import (
	"github.com/stashapp/stash/pkg/tag"
)

type GalleryChecksumsFinder interface {
	FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error)
type GalleryFinder interface {
	FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
	FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error)
}

type FullCreatorUpdater interface {

@@ -24,12 +27,12 @@ type FullCreatorUpdater interface {

type Importer struct {
	ReaderWriter        FullCreatorUpdater
	FileFinder          file.Getter
	StudioWriter        studio.NameFinderCreator
	GalleryWriter       GalleryChecksumsFinder
	GalleryFinder       GalleryFinder
	PerformerWriter     performer.NameFinderCreator
	TagWriter           tag.NameFinderCreator
	Input               jsonschema.Image
	Path                string
	MissingRefBehaviour models.ImportMissingRefEnum

	ID int

@@ -39,6 +42,10 @@ type Importer struct {
func (i *Importer) PreImport(ctx context.Context) error {
	i.image = i.imageJSONToImage(i.Input)

	if err := i.populateFiles(ctx); err != nil {
		return err
	}

	if err := i.populateStudio(ctx); err != nil {
		return err
	}

@@ -65,6 +72,12 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
		PerformerIDs: models.NewRelatedIDs([]int{}),
		TagIDs:       models.NewRelatedIDs([]int{}),
		GalleryIDs:   models.NewRelatedIDs([]int{}),

		Title:     imageJSON.Title,
		Organized: imageJSON.Organized,
		OCounter:  imageJSON.OCounter,
		CreatedAt: imageJSON.CreatedAt.GetTime(),
		UpdatedAt: imageJSON.UpdatedAt.GetTime(),
	}

	if imageJSON.Title != "" {

@@ -74,26 +87,27 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
		newImage.Rating = &imageJSON.Rating
	}

	newImage.Organized = imageJSON.Organized
	newImage.OCounter = imageJSON.OCounter
	newImage.CreatedAt = imageJSON.CreatedAt.GetTime()
	newImage.UpdatedAt = imageJSON.UpdatedAt.GetTime()

	// if imageJSON.File != nil {
	// 	if imageJSON.File.Size != 0 {
	// 		newImage.Size = &imageJSON.File.Size
	// 	}
	// 	if imageJSON.File.Width != 0 {
	// 		newImage.Width = &imageJSON.File.Width
	// 	}
	// 	if imageJSON.File.Height != 0 {
	// 		newImage.Height = &imageJSON.File.Height
	// 	}
	// }

	return newImage
}

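// populateFiles resolves each referenced file path against the stored image
// files; a missing file fails the import.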
func (i *Importer) populateFiles(ctx context.Context) error {
	for _, ref := range i.Input.Files {
		path := filepath.FromSlash(ref)
		f, err := i.FileFinder.FindByPath(ctx, path)
		if err != nil {
			return fmt.Errorf("error finding file: %w", err)
		}

		if f == nil {
			return fmt.Errorf("image file '%s' not found", path)
		} else {
			i.image.Files = append(i.image.Files, f.(*file.ImageFile))
		}
	}

	return nil
}

func (i *Importer) populateStudio(ctx context.Context) error {
	if i.Input.Studio != "" {
		studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false)

@@ -136,16 +150,45 @@ func (i *Importer) createStudio(ctx context.Context, name string) (int, error) {
	return created.ID, nil
}

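// locateGallery finds a gallery referenced by the import: by folder path,
// then by zip file paths, then by user-gallery title.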
func (i *Importer) locateGallery(ctx context.Context, ref jsonschema.GalleryRef) (*models.Gallery, error) {
	var galleries []*models.Gallery
	var err error
	switch {
	case ref.FolderPath != "":
		galleries, err = i.GalleryFinder.FindByPath(ctx, ref.FolderPath)
	case len(ref.ZipFiles) > 0:
		for _, p := range ref.ZipFiles {
			galleries, err = i.GalleryFinder.FindByPath(ctx, p)
			if err != nil {
				break
			}

			if len(galleries) > 0 {
				break
			}
		}
	case ref.Title != "":
		galleries, err = i.GalleryFinder.FindUserGalleryByTitle(ctx, ref.Title)
	}

	var ret *models.Gallery
	if len(galleries) > 0 {
		ret = galleries[0]
	}

	return ret, err
}

func (i *Importer) populateGalleries(ctx context.Context) error {
	for _, checksum := range i.Input.Galleries {
		gallery, err := i.GalleryWriter.FindByChecksums(ctx, []string{checksum})
	for _, ref := range i.Input.Galleries {
		gallery, err := i.locateGallery(ctx, ref)
		if err != nil {
			return fmt.Errorf("error finding gallery: %v", err)
		}

		if len(gallery) == 0 {
		if gallery == nil {
			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
				return fmt.Errorf("image gallery '%s' not found", i.Input.Studio)
				return fmt.Errorf("image gallery '%s' not found", ref.String())
			}

			// we don't create galleries - just ignore

@@ -153,7 +196,7 @@ func (i *Importer) populateGalleries(ctx context.Context) error {
			continue
		}
		} else {
			i.image.GalleryIDs.Add(gallery[0].ID)
			i.image.GalleryIDs.Add(gallery.ID)
		}
	}

@@ -242,28 +285,46 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
}

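// Name returns a display name for the image being imported: the title if
// set, otherwise the first file path.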
func (i *Importer) Name() string {
	return i.Path
	if i.Input.Title != "" {
		return i.Input.Title
	}

	if len(i.Input.Files) > 0 {
		return i.Input.Files[0]
	}

	return ""
}

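// FindExistingID returns the ID of an existing image that already owns any
// of the imported file IDs.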
func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
	// var existing []*models.Image
	// var err error
	// existing, err = i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
	var existing []*models.Image
	var err error

	// if err != nil {
	// 	return nil, err
	// }
	for _, f := range i.image.Files {
		existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
		if err != nil {
			return nil, err
		}

		// if len(existing) > 0 {
		// 	id := existing[0].ID
		// 	return &id, nil
		// }
		if len(existing) > 0 {
			id := existing[0].ID
			return &id, nil
		}
	}

	return nil, nil
}

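// Create persists the image together with the IDs of its files.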
func (i *Importer) Create(ctx context.Context) (*int, error) {
	err := i.ReaderWriter.Create(ctx, &models.ImageCreateInput{Image: &i.image})
	var fileIDs []file.ID
	for _, f := range i.image.Files {
		fileIDs = append(fileIDs, f.Base().ID)
	}

	err := i.ReaderWriter.Create(ctx, &models.ImageCreateInput{
		Image:   &i.image,
		FileIDs: fileIDs,
	})
	if err != nil {
		return nil, fmt.Errorf("error creating image: %v", err)
	}

@@ -1,492 +1,285 @@
package image

// import (
// "context"
// "errors"
// "testing"

// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/models/jsonschema"
// "github.com/stashapp/stash/pkg/models/mocks"
// "github.com/stretchr/testify/assert"
// "github.com/stretchr/testify/mock"
// )

// var (
// path = "path"

// imageNameErr = "imageNameErr"
// // existingImageName = "existingImageName"

// existingImageID = 100
// existingStudioID = 101
// existingGalleryID = 102
// existingPerformerID = 103
// // existingMovieID = 104
// existingTagID = 105

// existingStudioName = "existingStudioName"
// existingStudioErr = "existingStudioErr"
// missingStudioName = "missingStudioName"

// existingGalleryChecksum = "existingGalleryChecksum"
// existingGalleryErr = "existingGalleryErr"
// missingGalleryChecksum = "missingGalleryChecksum"

// existingPerformerName = "existingPerformerName"
// existingPerformerErr = "existingPerformerErr"
// missingPerformerName = "missingPerformerName"

// existingTagName = "existingTagName"
// existingTagErr = "existingTagErr"
// missingTagName = "missingTagName"

// missingChecksum = "missingChecksum"
// errChecksum = "errChecksum"
// )

// var testCtx = context.Background()

// func TestImporterName(t *testing.T) {
// i := Importer{
// Path: path,
// Input: jsonschema.Image{},
// }

// assert.Equal(t, path, i.Name())
// }

// func TestImporterPreImport(t *testing.T) {
// i := Importer{
// Path: path,
// }

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// }

// func TestImporterPreImportWithStudio(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Path: path,
// Input: jsonschema.Image{
// Studio: existingStudioName,
// },
// }

// studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
// ID: existingStudioID,
// }, nil).Once()
// studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingStudioID, *i.image.StudioID)

// i.Input.Studio = existingStudioErr
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// studioReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingStudio(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// Path: path,
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Image{
// Studio: missingStudioName,
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
// ID: existingStudioID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingStudioID, *i.image.StudioID)

// studioReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Path: path,
// Input: jsonschema.Image{
// Studio: missingStudioName,
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterPreImportWithGallery(t *testing.T) {
// galleryReaderWriter := &mocks.GalleryReaderWriter{}

// i := Importer{
// GalleryWriter: galleryReaderWriter,
// Path: path,
// Input: jsonschema.Image{
// Galleries: []string{
// existingGalleryChecksum,
// },
// },
// }

// galleryReaderWriter.On("FindByChecksums", testCtx, []string{existingGalleryChecksum}).Return([]*models.Gallery{{
// ID: existingGalleryID,
// }}, nil).Once()
// galleryReaderWriter.On("FindByChecksums", testCtx, []string{existingGalleryErr}).Return(nil, errors.New("FindByChecksum error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingGalleryID, i.image.GalleryIDs[0])

// i.Input.Galleries = []string{
// existingGalleryErr,
// }

// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// galleryReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingGallery(t *testing.T) {
// galleryReaderWriter := &mocks.GalleryReaderWriter{}

// i := Importer{
// Path: path,
// GalleryWriter: galleryReaderWriter,
// Input: jsonschema.Image{
// Galleries: []string{
// missingGalleryChecksum,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// galleryReaderWriter.On("FindByChecksums", testCtx, []string{missingGalleryChecksum}).Return(nil, nil).Times(3)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Nil(t, i.image.GalleryIDs)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Nil(t, i.image.GalleryIDs)

// galleryReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithPerformer(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// Path: path,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Image{
// Performers: []string{
// existingPerformerName,
// },
// },
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
// {
// ID: existingPerformerID,
// Name: models.NullString(existingPerformerName),
// },
// }, nil).Once()
// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs)

// i.Input.Performers = []string{existingPerformerErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// performerReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingPerformer(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// Path: path,
// PerformerWriter: performerReaderWriter,
// Input: jsonschema.Image{
// Performers: []string{
// missingPerformerName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
// ID: existingPerformerID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs)

// performerReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// Path: path,
// Input: jsonschema.Image{
// Performers: []string{
// missingPerformerName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterPreImportWithTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// Path: path,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Image{
// Tags: []string{
// existingTagName,
// },
// },
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
// {
// ID: existingTagID,
// Name: existingTagName,
// },
// }, nil).Once()
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.image.TagIDs)

// i.Input.Tags = []string{existingTagErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// tagReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// Path: path,
// TagWriter: tagReaderWriter,
// Input: jsonschema.Image{
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
// ID: existingTagID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, []int{existingTagID}, i.image.TagIDs)
|
||||
|
||||
// tagReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
|
||||
// tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// TagWriter: tagReaderWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Tags: []string{
|
||||
// missingTagName,
|
||||
// },
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
// }
|
||||
|
||||
// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
|
||||
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
// }
|
||||
|
||||
// func TestImporterFindExistingID(t *testing.T) {
|
||||
// readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// ReaderWriter: readerWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Checksum: missingChecksum,
|
||||
// },
|
||||
// }
|
||||
|
||||
// expectedErr := errors.New("FindBy* error")
|
||||
// readerWriter.On("FindByChecksum", testCtx, missingChecksum).Return(nil, nil).Once()
|
||||
// readerWriter.On("FindByChecksum", testCtx, checksum).Return(&models.Image{
|
||||
// ID: existingImageID,
|
||||
// }, nil).Once()
|
||||
// readerWriter.On("FindByChecksum", testCtx, errChecksum).Return(nil, expectedErr).Once()
|
||||
|
||||
// id, err := i.FindExistingID(testCtx)
|
||||
// assert.Nil(t, id)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.Input.Checksum = checksum
|
||||
// id, err = i.FindExistingID(testCtx)
|
||||
// assert.Equal(t, existingImageID, *id)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.Input.Checksum = errChecksum
|
||||
// id, err = i.FindExistingID(testCtx)
|
||||
// assert.Nil(t, id)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// readerWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestCreate(t *testing.T) {
|
||||
// readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
// image := models.Image{
|
||||
// Title: title,
|
||||
// }
|
||||
|
||||
// imageErr := models.Image{
|
||||
// Title: imageNameErr,
|
||||
// }
|
||||
|
||||
// i := Importer{
|
||||
// ReaderWriter: readerWriter,
|
||||
// image: image,
|
||||
// }
|
||||
|
||||
// errCreate := errors.New("Create error")
|
||||
// readerWriter.On("Create", testCtx, &image).Run(func(args mock.Arguments) {
|
||||
// args.Get(1).(*models.Image).ID = imageID
|
||||
// }).Return(nil).Once()
|
||||
// readerWriter.On("Create", testCtx, &imageErr).Return(errCreate).Once()
|
||||
|
||||
// id, err := i.Create(testCtx)
|
||||
// assert.Equal(t, imageID, *id)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, imageID, i.ID)
|
||||
|
||||
// i.image = imageErr
|
||||
// id, err = i.Create(testCtx)
|
||||
// assert.Nil(t, id)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// readerWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestUpdate(t *testing.T) {
|
||||
// readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
// image := models.Image{
|
||||
// Title: title,
|
||||
// }
|
||||
|
||||
// imageErr := models.Image{
|
||||
// Title: imageNameErr,
|
||||
// }
|
||||
|
||||
// i := Importer{
|
||||
// ReaderWriter: readerWriter,
|
||||
// image: image,
|
||||
// }
|
||||
|
||||
// errUpdate := errors.New("Update error")
|
||||
|
||||
// // id needs to be set for the mock input
|
||||
// image.ID = imageID
|
||||
// readerWriter.On("Update", testCtx, &image).Return(nil).Once()
|
||||
|
||||
// err := i.Update(testCtx, imageID)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, imageID, i.ID)
|
||||
|
||||
// i.image = imageErr
|
||||
|
||||
// // need to set id separately
|
||||
// imageErr.ID = errImageID
|
||||
// readerWriter.On("Update", testCtx, &imageErr).Return(errUpdate).Once()
|
||||
|
||||
// err = i.Update(testCtx, errImageID)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// readerWriter.AssertExpectations(t)
|
||||
// }
|
||||
import (
    "context"
    "errors"
    "testing"

    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/models/jsonschema"
    "github.com/stashapp/stash/pkg/models/mocks"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/mock"
)

var (
    existingStudioID    = 101
    existingPerformerID = 103
    existingTagID       = 105

    existingStudioName = "existingStudioName"
    existingStudioErr  = "existingStudioErr"
    missingStudioName  = "missingStudioName"

    existingPerformerName = "existingPerformerName"
    existingPerformerErr  = "existingPerformerErr"
    missingPerformerName  = "missingPerformerName"

    existingTagName = "existingTagName"
    existingTagErr  = "existingTagErr"
    missingTagName  = "missingTagName"
)

var testCtx = context.Background()

func TestImporterPreImport(t *testing.T) {
    i := Importer{}

    err := i.PreImport(testCtx)
    assert.Nil(t, err)
}

func TestImporterPreImportWithStudio(t *testing.T) {
    studioReaderWriter := &mocks.StudioReaderWriter{}

    i := Importer{
        StudioWriter: studioReaderWriter,
        Input: jsonschema.Image{
            Studio: existingStudioName,
        },
    }

    studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
        ID: existingStudioID,
    }, nil).Once()
    studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

    err := i.PreImport(testCtx)
    assert.Nil(t, err)
    assert.Equal(t, existingStudioID, *i.image.StudioID)

    i.Input.Studio = existingStudioErr
    err = i.PreImport(testCtx)
    assert.NotNil(t, err)

    studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
    studioReaderWriter := &mocks.StudioReaderWriter{}

    i := Importer{
        StudioWriter: studioReaderWriter,
        Input: jsonschema.Image{
            Studio: missingStudioName,
        },
        MissingRefBehaviour: models.ImportMissingRefEnumFail,
    }

    studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
    studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
        ID: existingStudioID,
    }, nil)

    err := i.PreImport(testCtx)
    assert.NotNil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
    err = i.PreImport(testCtx)
    assert.Nil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
    err = i.PreImport(testCtx)
    assert.Nil(t, err)
    assert.Equal(t, existingStudioID, *i.image.StudioID)

    studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
    studioReaderWriter := &mocks.StudioReaderWriter{}

    i := Importer{
        StudioWriter: studioReaderWriter,
        Input: jsonschema.Image{
            Studio: missingStudioName,
        },
        MissingRefBehaviour: models.ImportMissingRefEnumCreate,
    }

    studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
    studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

    err := i.PreImport(testCtx)
    assert.NotNil(t, err)
}

func TestImporterPreImportWithPerformer(t *testing.T) {
    performerReaderWriter := &mocks.PerformerReaderWriter{}

    i := Importer{
        PerformerWriter:     performerReaderWriter,
        MissingRefBehaviour: models.ImportMissingRefEnumFail,
        Input: jsonschema.Image{
            Performers: []string{
                existingPerformerName,
            },
        },
    }

    performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
        {
            ID:   existingPerformerID,
            Name: models.NullString(existingPerformerName),
        },
    }, nil).Once()
    performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

    err := i.PreImport(testCtx)
    assert.Nil(t, err)
    assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs.List())

    i.Input.Performers = []string{existingPerformerErr}
    err = i.PreImport(testCtx)
    assert.NotNil(t, err)

    performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformer(t *testing.T) {
    performerReaderWriter := &mocks.PerformerReaderWriter{}

    i := Importer{
        PerformerWriter: performerReaderWriter,
        Input: jsonschema.Image{
            Performers: []string{
                missingPerformerName,
            },
        },
        MissingRefBehaviour: models.ImportMissingRefEnumFail,
    }

    performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
    performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
        ID: existingPerformerID,
    }, nil)

    err := i.PreImport(testCtx)
    assert.NotNil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
    err = i.PreImport(testCtx)
    assert.Nil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
    err = i.PreImport(testCtx)
    assert.Nil(t, err)
    assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs.List())

    performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
    performerReaderWriter := &mocks.PerformerReaderWriter{}

    i := Importer{
        PerformerWriter: performerReaderWriter,
        Input: jsonschema.Image{
            Performers: []string{
                missingPerformerName,
            },
        },
        MissingRefBehaviour: models.ImportMissingRefEnumCreate,
    }

    performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
    performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

    err := i.PreImport(testCtx)
    assert.NotNil(t, err)
}

func TestImporterPreImportWithTag(t *testing.T) {
    tagReaderWriter := &mocks.TagReaderWriter{}

    i := Importer{
        TagWriter:           tagReaderWriter,
        MissingRefBehaviour: models.ImportMissingRefEnumFail,
        Input: jsonschema.Image{
            Tags: []string{
                existingTagName,
            },
        },
    }

    tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
        {
            ID:   existingTagID,
            Name: existingTagName,
        },
    }, nil).Once()
    tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

    err := i.PreImport(testCtx)
    assert.Nil(t, err)
    assert.Equal(t, []int{existingTagID}, i.image.TagIDs.List())

    i.Input.Tags = []string{existingTagErr}
    err = i.PreImport(testCtx)
    assert.NotNil(t, err)

    tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTag(t *testing.T) {
    tagReaderWriter := &mocks.TagReaderWriter{}

    i := Importer{
        TagWriter: tagReaderWriter,
        Input: jsonschema.Image{
            Tags: []string{
                missingTagName,
            },
        },
        MissingRefBehaviour: models.ImportMissingRefEnumFail,
    }

    tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
    tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
        ID: existingTagID,
    }, nil)

    err := i.PreImport(testCtx)
    assert.NotNil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
    err = i.PreImport(testCtx)
    assert.Nil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
    err = i.PreImport(testCtx)
    assert.Nil(t, err)
    assert.Equal(t, []int{existingTagID}, i.image.TagIDs.List())

    tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
    tagReaderWriter := &mocks.TagReaderWriter{}

    i := Importer{
        TagWriter: tagReaderWriter,
        Input: jsonschema.Image{
            Tags: []string{
                missingTagName,
            },
        },
        MissingRefBehaviour: models.ImportMissingRefEnumCreate,
    }

    tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
    tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

    err := i.PreImport(testCtx)
    assert.NotNil(t, err)
}
@@ -0,0 +1,156 @@
package jsonschema

import (
    "bytes"
    "errors"
    "fmt"
    "io/ioutil"
    "os"
    "path"
    "strings"

    jsoniter "github.com/json-iterator/go"
    "github.com/stashapp/stash/pkg/hash/md5"
    "github.com/stashapp/stash/pkg/models/json"
)

const (
    DirEntryTypeFolder = "folder"
    DirEntryTypeVideo  = "video"
    DirEntryTypeImage  = "image"
    DirEntryTypeFile   = "file"
)

type DirEntry interface {
    IsFile() bool
    Filename() string
    DirEntry() *BaseDirEntry
}

type BaseDirEntry struct {
    ZipFile string        `json:"zip_file,omitempty"`
    ModTime json.JSONTime `json:"mod_time"`

    Type string `json:"type,omitempty"`

    Path string `json:"path,omitempty"`

    CreatedAt json.JSONTime `json:"created_at,omitempty"`
    UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}

func (f *BaseDirEntry) DirEntry() *BaseDirEntry {
    return f
}

func (f *BaseDirEntry) IsFile() bool {
    return false
}

func (f *BaseDirEntry) Filename() string {
    // prefix with the path depth so that we can import lower-level files/folders first
    depth := strings.Count(f.Path, "/")

    // hash the full path for a unique filename
    hash := md5.FromString(f.Path)

    basename := path.Base(f.Path)

    return fmt.Sprintf("%02x.%s.%s.json", depth, basename, hash)
}

type BaseFile struct {
    BaseDirEntry

    Fingerprints []Fingerprint `json:"fingerprints,omitempty"`
    Size         int64         `json:"size"`
}

func (f *BaseFile) IsFile() bool {
    return true
}

type Fingerprint struct {
    Type        string      `json:"type,omitempty"`
    Fingerprint interface{} `json:"fingerprint,omitempty"`
}

type VideoFile struct {
    *BaseFile
    Format     string  `json:"format,omitempty"`
    Width      int     `json:"width,omitempty"`
    Height     int     `json:"height,omitempty"`
    Duration   float64 `json:"duration,omitempty"`
    VideoCodec string  `json:"video_codec,omitempty"`
    AudioCodec string  `json:"audio_codec,omitempty"`
    FrameRate  float64 `json:"frame_rate,omitempty"`
    BitRate    int64   `json:"bitrate,omitempty"`

    Interactive      bool `json:"interactive,omitempty"`
    InteractiveSpeed *int `json:"interactive_speed,omitempty"`
}

type ImageFile struct {
    *BaseFile
    Format string `json:"format,omitempty"`
    Width  int    `json:"width,omitempty"`
    Height int    `json:"height,omitempty"`
}

func LoadFileFile(filePath string) (DirEntry, error) {
    r, err := os.Open(filePath)
    if err != nil {
        return nil, err
    }
    defer r.Close()

    data, err := ioutil.ReadAll(r)
    if err != nil {
        return nil, err
    }

    var json = jsoniter.ConfigCompatibleWithStandardLibrary
    jsonParser := json.NewDecoder(bytes.NewReader(data))

    var bf BaseDirEntry
    if err := jsonParser.Decode(&bf); err != nil {
        return nil, err
    }

    jsonParser = json.NewDecoder(bytes.NewReader(data))

    switch bf.Type {
    case DirEntryTypeFolder:
        return &bf, nil
    case DirEntryTypeVideo:
        var vf VideoFile
        if err := jsonParser.Decode(&vf); err != nil {
            return nil, err
        }

        return &vf, nil
    case DirEntryTypeImage:
        var imf ImageFile
        if err := jsonParser.Decode(&imf); err != nil {
            return nil, err
        }

        return &imf, nil
    case DirEntryTypeFile:
        var bff BaseFile
        if err := jsonParser.Decode(&bff); err != nil {
            return nil, err
        }

        return &bff, nil
    default:
        return nil, errors.New("unknown file type")
    }
}

func SaveFileFile(filePath string, file DirEntry) error {
    if file == nil {
        return fmt.Errorf("file must not be nil")
    }
    return marshalToFile(filePath, file)
}
@@ -0,0 +1,56 @@
package jsonschema

import (
    "fmt"
    "os"
    "path"
    "strings"

    jsoniter "github.com/json-iterator/go"
    "github.com/stashapp/stash/pkg/hash/md5"
    "github.com/stashapp/stash/pkg/models/json"
)

type Folder struct {
    BaseDirEntry

    Path string `json:"path,omitempty"`

    CreatedAt json.JSONTime `json:"created_at,omitempty"`
    UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}

func (f *Folder) Filename() string {
    // prefix with the path depth so that we can import lower-level folders first
    depth := strings.Count(f.Path, "/")

    // hash the full path for a unique filename
    hash := md5.FromString(f.Path)

    basename := path.Base(f.Path)

    return fmt.Sprintf("%02x.%s.%s.json", depth, basename, hash)
}

func LoadFolderFile(filePath string) (*Folder, error) {
    var folder Folder
    file, err := os.Open(filePath)
    if err != nil {
        return nil, err
    }
    defer file.Close()
    var json = jsoniter.ConfigCompatibleWithStandardLibrary
    jsonParser := json.NewDecoder(file)
    err = jsonParser.Decode(&folder)
    if err != nil {
        return nil, err
    }
    return &folder, nil
}

func SaveFolderFile(filePath string, folder *Folder) error {
    if folder == nil {
        return fmt.Errorf("folder must not be nil")
    }
    return marshalToFile(filePath, folder)
}
@@ -3,27 +3,37 @@ package jsonschema

import (
    "fmt"
    "os"
    "strings"

    jsoniter "github.com/json-iterator/go"
    "github.com/stashapp/stash/pkg/models/json"
)

type Gallery struct {
    Path        string        `json:"path,omitempty"`
    Checksum    string        `json:"checksum,omitempty"`
    Zip         bool          `json:"zip,omitempty"`
    Title       string        `json:"title,omitempty"`
    URL         string        `json:"url,omitempty"`
    Date        string        `json:"date,omitempty"`
    Details     string        `json:"details,omitempty"`
    Rating      int           `json:"rating,omitempty"`
    Organized   bool          `json:"organized,omitempty"`
    Studio      string        `json:"studio,omitempty"`
    Performers  []string      `json:"performers,omitempty"`
    Tags        []string      `json:"tags,omitempty"`
    FileModTime json.JSONTime `json:"file_mod_time,omitempty"`
    CreatedAt   json.JSONTime `json:"created_at,omitempty"`
    UpdatedAt   json.JSONTime `json:"updated_at,omitempty"`
    ZipFiles   []string      `json:"zip_files,omitempty"`
    FolderPath string        `json:"folder_path,omitempty"`
    Title      string        `json:"title,omitempty"`
    URL        string        `json:"url,omitempty"`
    Date       string        `json:"date,omitempty"`
    Details    string        `json:"details,omitempty"`
    Rating     int           `json:"rating,omitempty"`
    Organized  bool          `json:"organized,omitempty"`
    Studio     string        `json:"studio,omitempty"`
    Performers []string      `json:"performers,omitempty"`
    Tags       []string      `json:"tags,omitempty"`
    CreatedAt  json.JSONTime `json:"created_at,omitempty"`
    UpdatedAt  json.JSONTime `json:"updated_at,omitempty"`
}

func (s Gallery) Filename(basename string, hash string) string {
    ret := basename

    if ret != "" {
        ret += "."
    }
    ret += hash

    return ret + ".json"
}

func LoadGalleryFile(filePath string) (*Gallery, error) {
@@ -48,3 +58,23 @@ func SaveGalleryFile(filePath string, gallery *Gallery) error {
    }
    return marshalToFile(filePath, gallery)
}

// GalleryRef is used to identify a Gallery.
// Only one field should be populated.
type GalleryRef struct {
    ZipFiles   []string `json:"zip_files,omitempty"`
    FolderPath string   `json:"folder_path,omitempty"`
    // Title is used only if FolderPath and ZipFiles are empty
    Title string `json:"title,omitempty"`
}

func (r GalleryRef) String() string {
    switch {
    case r.FolderPath != "":
        return "{ folder: " + r.FolderPath + " }"
    case len(r.ZipFiles) > 0:
        return "{ zipFiles: [" + strings.Join(r.ZipFiles, ", ") + "] }"
    default:
        return "{ title: " + r.Title + " }"
    }
}
@@ -8,28 +8,33 @@ import (
    "github.com/stashapp/stash/pkg/models/json"
)

type ImageFile struct {
    ModTime json.JSONTime `json:"mod_time,omitempty"`
    Size    int64         `json:"size"`
    Width   int           `json:"width"`
    Height  int           `json:"height"`
}

type Image struct {
    Title      string        `json:"title,omitempty"`
    Checksum   string        `json:"checksum,omitempty"`
    Studio     string        `json:"studio,omitempty"`
    Rating     int           `json:"rating,omitempty"`
    Organized  bool          `json:"organized,omitempty"`
    OCounter   int           `json:"o_counter,omitempty"`
    Galleries  []string      `json:"galleries,omitempty"`
    Galleries  []GalleryRef  `json:"galleries,omitempty"`
    Performers []string      `json:"performers,omitempty"`
    Tags       []string      `json:"tags,omitempty"`
    File       *ImageFile    `json:"file,omitempty"`
    Files      []string      `json:"files,omitempty"`
    CreatedAt  json.JSONTime `json:"created_at,omitempty"`
    UpdatedAt  json.JSONTime `json:"updated_at,omitempty"`
}

func (s Image) Filename(basename string, hash string) string {
    ret := s.Title
    if ret == "" {
        ret = basename
    }

    if hash != "" {
        ret += "." + hash
    }

    return ret + ".json"
}

func LoadImageFile(filePath string) (*Image, error) {
    var image Image
    file, err := os.Open(filePath)
@ -1,47 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
type PathNameMapping struct {
|
||||
Path string `json:"path,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
Checksum string `json:"checksum"`
|
||||
}
|
||||
|
||||
type Mappings struct {
|
||||
Tags []PathNameMapping `json:"tags"`
|
||||
Performers []PathNameMapping `json:"performers"`
|
||||
Studios []PathNameMapping `json:"studios"`
|
||||
Movies []PathNameMapping `json:"movies"`
|
||||
Galleries []PathNameMapping `json:"galleries"`
|
||||
Scenes []PathNameMapping `json:"scenes"`
|
||||
Images []PathNameMapping `json:"images"`
|
||||
}
|
||||
|
||||
func LoadMappingsFile(filePath string) (*Mappings, error) {
|
||||
var mappings Mappings
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&mappings)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &mappings, nil
|
||||
}
|
||||
|
||||
func SaveMappingsFile(filePath string, mappings *Mappings) error {
|
||||
if mappings == nil {
|
||||
return fmt.Errorf("mappings must not be nil")
|
||||
}
|
||||
return marshalToFile(filePath, mappings)
|
||||
}
|
|
@@ -26,6 +26,10 @@ type Movie struct {
    UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}

func (s Movie) Filename() string {
    return s.Name + ".json"
}

// Backwards Compatible synopsis for the movie
type MovieSynopsisBC struct {
    Synopsis string `json:"sypnopsis,omitempty"`
@@ -40,6 +40,10 @@ type Performer struct {
    IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
}

func (s Performer) Filename() string {
    return s.Name + ".json"
}

func LoadPerformerFile(filePath string) (*Performer, error) {
    var performer Performer
    file, err := os.Open(filePath)
@@ -38,9 +38,6 @@ type SceneMovie struct {

type Scene struct {
    Title    string `json:"title,omitempty"`
    Checksum string `json:"checksum,omitempty"`
    OSHash   string `json:"oshash,omitempty"`
    Phash    string `json:"phash,omitempty"`
    Studio   string `json:"studio,omitempty"`
    URL      string `json:"url,omitempty"`
    Date     string `json:"date,omitempty"`
@@ -48,18 +45,31 @@ type Scene struct {
    Organized  bool             `json:"organized,omitempty"`
    OCounter   int              `json:"o_counter,omitempty"`
    Details    string           `json:"details,omitempty"`
    Galleries  []string         `json:"galleries,omitempty"`
    Galleries  []GalleryRef     `json:"galleries,omitempty"`
    Performers []string         `json:"performers,omitempty"`
    Movies     []SceneMovie     `json:"movies,omitempty"`
    Tags       []string         `json:"tags,omitempty"`
    Markers    []SceneMarker    `json:"markers,omitempty"`
    File       *SceneFile       `json:"file,omitempty"`
    Files      []string         `json:"files,omitempty"`
    Cover      string           `json:"cover,omitempty"`
    CreatedAt  json.JSONTime    `json:"created_at,omitempty"`
    UpdatedAt  json.JSONTime    `json:"updated_at,omitempty"`
    StashIDs   []models.StashID `json:"stash_ids,omitempty"`
}

func (s Scene) Filename(basename string, hash string) string {
    ret := s.Title
    if ret == "" {
        ret = basename
    }

    if hash != "" {
        ret += "." + hash
    }

    return ret + ".json"
}

func LoadSceneFile(filePath string) (*Scene, error) {
    var scene Scene
    file, err := os.Open(filePath)
@@ -23,6 +23,10 @@ type Studio struct {
    IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
}

func (s Studio) Filename() string {
    return s.Name + ".json"
}

func LoadStudioFile(filePath string) (*Studio, error) {
    var studio Studio
    file, err := os.Open(filePath)
@@ -18,6 +18,10 @@ type Tag struct {
    UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}

func (s Tag) Filename() string {
    return s.Name + ".json"
}

func LoadTagFile(filePath string) (*Tag, error) {
    var tag Tag
    file, err := os.Open(filePath)
@@ -10,8 +10,7 @@ import (
type JSONPaths struct {
    Metadata string

    MappingsFile string
    ScrapedFile  string
    ScrapedFile string

    Performers string
    Scenes     string
@@ -20,12 +19,12 @@ type JSONPaths struct {
    Studios string
    Tags    string
    Movies  string
    Files   string
}

func newJSONPaths(baseDir string) *JSONPaths {
    jp := JSONPaths{}
    jp.Metadata = baseDir
    jp.MappingsFile = filepath.Join(baseDir, "mappings.json")
    jp.ScrapedFile = filepath.Join(baseDir, "scraped.json")
    jp.Performers = filepath.Join(baseDir, "performers")
    jp.Scenes = filepath.Join(baseDir, "scenes")
@@ -34,6 +33,7 @@ func newJSONPaths(baseDir string) *JSONPaths {
    jp.Studios = filepath.Join(baseDir, "studios")
    jp.Movies = filepath.Join(baseDir, "movies")
    jp.Tags = filepath.Join(baseDir, "tags")
    jp.Files = filepath.Join(baseDir, "files")
    return &jp
}

@@ -42,6 +42,18 @@ func GetJSONPaths(baseDir string) *JSONPaths {
    return jp
}

func EmptyJSONDirs(baseDir string) {
    jsonPaths := GetJSONPaths(baseDir)
    _ = fsutil.EmptyDir(jsonPaths.Scenes)
    _ = fsutil.EmptyDir(jsonPaths.Images)
    _ = fsutil.EmptyDir(jsonPaths.Galleries)
    _ = fsutil.EmptyDir(jsonPaths.Performers)
    _ = fsutil.EmptyDir(jsonPaths.Studios)
    _ = fsutil.EmptyDir(jsonPaths.Movies)
    _ = fsutil.EmptyDir(jsonPaths.Tags)
    _ = fsutil.EmptyDir(jsonPaths.Files)
}

func EnsureJSONDirs(baseDir string) {
    jsonPaths := GetJSONPaths(baseDir)
    if err := fsutil.EnsureDir(jsonPaths.Metadata); err != nil {
@@ -68,32 +80,7 @@ func EnsureJSONDirs(baseDir string) {
    if err := fsutil.EnsureDir(jsonPaths.Tags); err != nil {
        logger.Warnf("couldn't create directories for Tags: %v", err)
    }
}

func (jp *JSONPaths) PerformerJSONPath(checksum string) string {
    return filepath.Join(jp.Performers, checksum+".json")
}

func (jp *JSONPaths) SceneJSONPath(checksum string) string {
    return filepath.Join(jp.Scenes, checksum+".json")
}

func (jp *JSONPaths) ImageJSONPath(checksum string) string {
    return filepath.Join(jp.Images, checksum+".json")
}

func (jp *JSONPaths) GalleryJSONPath(checksum string) string {
    return filepath.Join(jp.Galleries, checksum+".json")
}

func (jp *JSONPaths) StudioJSONPath(checksum string) string {
    return filepath.Join(jp.Studios, checksum+".json")
}

func (jp *JSONPaths) TagJSONPath(checksum string) string {
    return filepath.Join(jp.Tags, checksum+".json")
}

func (jp *JSONPaths) MovieJSONPath(checksum string) string {
    return filepath.Join(jp.Movies, checksum+".json")
    if err := fsutil.EnsureDir(jsonPaths.Files); err != nil {
        logger.Warnf("couldn't create directories for Files: %v", err)
    }
}
@ -45,18 +45,6 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
|
|||
UpdatedAt: json.JSONTime{Time: scene.UpdatedAt},
|
||||
}
|
||||
|
||||
// if scene.Checksum != nil {
|
||||
// newSceneJSON.Checksum = *scene.Checksum
|
||||
// }
|
||||
|
||||
// if scene.OSHash != nil {
|
||||
// newSceneJSON.OSHash = *scene.OSHash
|
||||
// }
|
||||
|
||||
// if scene.Phash != nil {
|
||||
// newSceneJSON.Phash = utils.PhashToString(*scene.Phash)
|
||||
// }
|
||||
|
||||
if scene.Date != nil {
|
||||
newSceneJSON.Date = scene.Date.String()
|
||||
}
|
||||
|
@@ -68,7 +56,9 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
    newSceneJSON.Organized = scene.Organized
    newSceneJSON.OCounter = scene.OCounter

    newSceneJSON.File = getSceneFileJSON(scene)
    for _, f := range scene.Files {
        newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
    }

    cover, err := reader.GetCover(ctx, scene.ID)
    if err != nil {
@@ -93,52 +83,52 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
    return &newSceneJSON, nil
}

func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
    ret := &jsonschema.SceneFile{}
// func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
//     ret := &jsonschema.SceneFile{}

    // TODO
    // if scene.FileModTime != nil {
    //     ret.ModTime = json.JSONTime{Time: *scene.FileModTime}
    // }
// TODO
// if scene.FileModTime != nil {
//     ret.ModTime = json.JSONTime{Time: *scene.FileModTime}
// }

    // if scene.Size != nil {
    //     ret.Size = *scene.Size
    // }
// if scene.Size != nil {
//     ret.Size = *scene.Size
// }

    // if scene.Duration != nil {
    //     ret.Duration = getDecimalString(*scene.Duration)
    // }
// if scene.Duration != nil {
//     ret.Duration = getDecimalString(*scene.Duration)
// }

    // if scene.VideoCodec != nil {
    //     ret.VideoCodec = *scene.VideoCodec
    // }
// if scene.VideoCodec != nil {
//     ret.VideoCodec = *scene.VideoCodec
// }

    // if scene.AudioCodec != nil {
    //     ret.AudioCodec = *scene.AudioCodec
    // }
// if scene.AudioCodec != nil {
//     ret.AudioCodec = *scene.AudioCodec
// }

    // if scene.Format != nil {
    //     ret.Format = *scene.Format
    // }
// if scene.Format != nil {
//     ret.Format = *scene.Format
// }

    // if scene.Width != nil {
    //     ret.Width = *scene.Width
    // }
// if scene.Width != nil {
//     ret.Width = *scene.Width
// }

    // if scene.Height != nil {
    //     ret.Height = *scene.Height
    // }
// if scene.Height != nil {
//     ret.Height = *scene.Height
// }

    // if scene.Framerate != nil {
    //     ret.Framerate = getDecimalString(*scene.Framerate)
    // }
// if scene.Framerate != nil {
//     ret.Framerate = getDecimalString(*scene.Framerate)
// }

    // if scene.Bitrate != nil {
    //     ret.Bitrate = int(*scene.Bitrate)
    // }
// if scene.Bitrate != nil {
//     ret.Bitrate = int(*scene.Bitrate)
// }

    return ret
}
// return ret
// }

// GetStudioName returns the name of the provided scene's studio. It returns an
// empty string if there is no studio assigned to the scene.

File diff suppressed because it is too large
@@ -3,8 +3,10 @@ package scene

import (
    "context"
    "fmt"
    "path/filepath"
    "strings"

    "github.com/stashapp/stash/pkg/file"
    "github.com/stashapp/stash/pkg/gallery"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/models/jsonschema"
@@ -24,13 +26,13 @@ type FullCreatorUpdater interface {

type Importer struct {
    ReaderWriter        FullCreatorUpdater
    FileFinder          file.Getter
    StudioWriter        studio.NameFinderCreator
    GalleryWriter       gallery.ChecksumsFinder
    GalleryFinder       gallery.Finder
    PerformerWriter     performer.NameFinderCreator
    MovieWriter         movie.NameFinderCreator
    TagWriter           tag.NameFinderCreator
    Input               jsonschema.Scene
    Path                string
    MissingRefBehaviour models.ImportMissingRefEnum
    FileNamingAlgorithm models.HashAlgorithm

@@ -42,6 +44,10 @@ type Importer struct {
func (i *Importer) PreImport(ctx context.Context) error {
    i.scene = i.sceneJSONToScene(i.Input)

    if err := i.populateFiles(ctx); err != nil {
        return err
    }

    if err := i.populateStudio(ctx); err != nil {
        return err
    }
@@ -86,21 +92,6 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
    StashIDs: models.NewRelatedStashIDs(sceneJSON.StashIDs),
    }

    // if sceneJSON.Checksum != "" {
    //     newScene.Checksum = &sceneJSON.Checksum
    // }
    // if sceneJSON.OSHash != "" {
    //     newScene.OSHash = &sceneJSON.OSHash
    // }

    // if sceneJSON.Phash != "" {
    //     hash, err := strconv.ParseUint(sceneJSON.Phash, 16, 64)
    //     if err == nil {
    //         v := int64(hash)
    //         newScene.Phash = &v
    //     }
    // }

    if sceneJSON.Date != "" {
        d := models.NewDate(sceneJSON.Date)
        newScene.Date = &d
@@ -114,42 +105,27 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
    newScene.CreatedAt = sceneJSON.CreatedAt.GetTime()
    newScene.UpdatedAt = sceneJSON.UpdatedAt.GetTime()

    // if sceneJSON.File != nil {
    //     if sceneJSON.File.Size != "" {
    //         newScene.Size = &sceneJSON.File.Size
    //     }
    //     if sceneJSON.File.Duration != "" {
    //         duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
    //         newScene.Duration = &duration
    //     }
    //     if sceneJSON.File.VideoCodec != "" {
    //         newScene.VideoCodec = &sceneJSON.File.VideoCodec
    //     }
    //     if sceneJSON.File.AudioCodec != "" {
    //         newScene.AudioCodec = &sceneJSON.File.AudioCodec
    //     }
    //     if sceneJSON.File.Format != "" {
    //         newScene.Format = &sceneJSON.File.Format
    //     }
    //     if sceneJSON.File.Width != 0 {
    //         newScene.Width = &sceneJSON.File.Width
    //     }
    //     if sceneJSON.File.Height != 0 {
    //         newScene.Height = &sceneJSON.File.Height
    //     }
    //     if sceneJSON.File.Framerate != "" {
    //         framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
    //         newScene.Framerate = &framerate
    //     }
    //     if sceneJSON.File.Bitrate != 0 {
    //         v := int64(sceneJSON.File.Bitrate)
    //         newScene.Bitrate = &v
    //     }
    // }

    return newScene
}

func (i *Importer) populateFiles(ctx context.Context) error {
    for _, ref := range i.Input.Files {
        path := filepath.FromSlash(ref)
        f, err := i.FileFinder.FindByPath(ctx, path)
        if err != nil {
            return fmt.Errorf("error finding file: %w", err)
        }

        if f == nil {
            return fmt.Errorf("scene file '%s' not found", path)
        } else {
            i.scene.Files = append(i.scene.Files, f.(*file.VideoFile))
        }
    }

    return nil
}

func (i *Importer) populateStudio(ctx context.Context) error {
    if i.Input.Studio != "" {
        studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false)
@@ -192,33 +168,50 @@ func (i *Importer) createStudio(ctx context.Context, name string) (int, error) {
    return created.ID, nil
}

func (i *Importer) locateGallery(ctx context.Context, ref jsonschema.GalleryRef) (*models.Gallery, error) {
    var galleries []*models.Gallery
    var err error
    switch {
    case ref.FolderPath != "":
        galleries, err = i.GalleryFinder.FindByPath(ctx, ref.FolderPath)
    case len(ref.ZipFiles) > 0:
        for _, p := range ref.ZipFiles {
            galleries, err = i.GalleryFinder.FindByPath(ctx, p)
            if err != nil {
                break
            }

            if len(galleries) > 0 {
                break
            }
        }
    case ref.Title != "":
        galleries, err = i.GalleryFinder.FindUserGalleryByTitle(ctx, ref.Title)
    }

    var ret *models.Gallery
    if len(galleries) > 0 {
        ret = galleries[0]
    }

    return ret, err
}

func (i *Importer) populateGalleries(ctx context.Context) error {
    if len(i.Input.Galleries) > 0 {
        checksums := i.Input.Galleries
        galleries, err := i.GalleryWriter.FindByChecksums(ctx, checksums)
    for _, ref := range i.Input.Galleries {
        gallery, err := i.locateGallery(ctx, ref)
        if err != nil {
            return err
        }

        var pluckedChecksums []string
        for _, gallery := range galleries {
            pluckedChecksums = append(pluckedChecksums, gallery.Checksum())
        }

        missingGalleries := stringslice.StrFilter(checksums, func(checksum string) bool {
            return !stringslice.StrInclude(pluckedChecksums, checksum)
        })

        if len(missingGalleries) > 0 {
        if gallery == nil {
            if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
                return fmt.Errorf("scene galleries [%s] not found", strings.Join(missingGalleries, ", "))
                return fmt.Errorf("scene gallery '%s' not found", ref.String())
            }

            // we don't create galleries - just ignore
        }

        for _, o := range galleries {
            i.scene.GalleryIDs.Add(o.ID)
        } else {
            i.scene.GalleryIDs.Add(gallery.ID)
        }
    }

@@ -366,37 +359,42 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
}

func (i *Importer) Name() string {
    return i.Path
    if i.Input.Title != "" {
        return i.Input.Title
    }

    if len(i.Input.Files) > 0 {
        return i.Input.Files[0]
    }

    return ""
}

func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
    // TODO
    // var existing []*models.Scene
    // var err error
    var existing []*models.Scene
    var err error

    // switch i.FileNamingAlgorithm {
    // case models.HashAlgorithmMd5:
    //     existing, err = i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
    // case models.HashAlgorithmOshash:
    //     existing, err = i.ReaderWriter.FindByOSHash(ctx, i.Input.OSHash)
    // default:
    //     panic("unknown file naming algorithm")
    // }
    for _, f := range i.scene.Files {
        existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
        if err != nil {
            return nil, err
        }

    // if err != nil {
    //     return nil, err
    // }

    // if len(existing) > 0 {
    //     id := existing[0].ID
    //     return &id, nil
    // }
        if len(existing) > 0 {
            id := existing[0].ID
            return &id, nil
        }
    }

    return nil, nil
}

func (i *Importer) Create(ctx context.Context) (*int, error) {
    if err := i.ReaderWriter.Create(ctx, &i.scene, nil); err != nil {
    var fileIDs []file.ID
    for _, f := range i.scene.Files {
        fileIDs = append(fileIDs, f.Base().ID)
    }
    if err := i.ReaderWriter.Create(ctx, &i.scene, fileIDs); err != nil {
        return nil, fmt.Errorf("error creating scene: %v", err)
    }

File diff suppressed because it is too large
@ -173,8 +173,8 @@ type fileQueryRow struct {
|
|||
ParentFolderID null.Int `db:"parent_folder_id"`
|
||||
Size null.Int `db:"size"`
|
||||
ModTime null.Time `db:"mod_time"`
|
||||
CreatedAt null.Time `db:"created_at"`
|
||||
UpdatedAt null.Time `db:"updated_at"`
|
||||
CreatedAt null.Time `db:"file_created_at"`
|
||||
UpdatedAt null.Time `db:"file_updated_at"`
|
||||
|
||||
ZipBasename null.String `db:"zip_basename"`
|
||||
ZipFolderPath null.String `db:"zip_folder_path"`
|
||||
|
@@ -445,8 +445,8 @@ func (qb *FileStore) selectDataset() *goqu.SelectDataset {
        table.Col("parent_folder_id"),
        table.Col("size"),
        table.Col("mod_time"),
        table.Col("created_at"),
        table.Col("updated_at"),
        table.Col("created_at").As("file_created_at"),
        table.Col("updated_at").As("file_updated_at"),
        folderTable.Col("path").As("parent_folder_path"),
        fingerprintTable.Col("type").As("fingerprint_type"),
        fingerprintTable.Col("fingerprint"),
@@ -443,7 +443,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal
    sq := dialect.From(table).LeftJoin(
        galleriesFilesJoinTable,
        goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
    ).InnerJoin(
    ).LeftJoin(
        filesTable,
        goqu.On(filesTable.Col(idColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
    ).LeftJoin(
@@ -518,6 +518,26 @@ func (qb *GalleryStore) CountByImageID(ctx context.Context, imageID int) (int, e
    return count(ctx, q)
}

func (qb *GalleryStore) FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) {
    table := qb.table()

    sq := dialect.From(table).LeftJoin(
        galleriesFilesJoinTable,
        goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
    ).Select(table.Col(idColumn)).Where(
        table.Col("folder_id").IsNull(),
        galleriesFilesJoinTable.Col("file_id").IsNull(),
        table.Col("title").Eq(title),
    )

    ret, err := qb.findBySubquery(ctx, sq)
    if err != nil {
        return nil, fmt.Errorf("getting user galleries for title %s: %w", title, err)
    }

    return ret, nil
}

func (qb *GalleryStore) Count(ctx context.Context) (int, error) {
    q := dialect.Select(goqu.COUNT("*")).From(qb.table())
    return count(ctx, q)
@@ -6,9 +6,9 @@ After migrating, please run a scan on your entire library to populate missing data

Please report all issues to the following Github issue: https://github.com/stashapp/stash/issues/2737

### 💥 Known issues
* Import/export functionality is currently disabled. Needs further design.
### 💥 Known issues and other changes
* Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.
* Import/export schema has changed and is incompatible with the previous version.

### ✨ New Features
* Added support for identical files. Identical files are assigned to the same scene/gallery/image and can be viewed in File Info. ([#2676](https://github.com/stashapp/stash/pull/2676))
@@ -2,24 +2,37 @@

The metadata given to Stash can be exported into JSON format. This structure can be modified, or replicated by other means, and the resulting data can then be imported again, enabling automated scraping of all kinds. The export is a folder structure containing the following folders:

* `downloads`
* `files`
* `galleries`
* `images`
* `performers`
* `scenes`
* `studios`
* `movies`

Additionally, it contains a `mappings.json` file.

The mappings file contains a reference to all files within the folders, by including their checksum. All files in the aforementioned folders are named by their checksum (like `967ddf2e028f10fc8d36901833c25732.json`), which (at least in the case of galleries and scenes) is generated from the file that this metadata relates to. The algorithm for the checksum is MD5.

# File naming

When exported, files are named with different formats depending on the object type:

| Type | Format |
|------|--------|
| Files/Folders | `<path depth in hex, two character width>.<basename>.<hash>.json` |
| Galleries | `<first zip filename>.<path hash>.json` or `<folder basename>.<path hash>.json` or `<title>.json` |
| Images | `<title or first file basename>.<hash>.json` |
| Performers | `<name>.json` |
| Scenes | `<title or first file basename>.<hash>.json` |
| Studios | `<name>.json` |
| Movies | `<name>.json` |

Note that the file naming is not significant when importing. All json files will be read from the subdirectories.
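For illustration, an export following these rules might contain entries like the ones below. The paths and hashes here are invented placeholders, not real output:

```
files/02.holiday.mp4.4f1c2c9ab4a0d0a2b7a91f3e6d5c4b3a.json    (video at path depth 2)
galleries/holiday-2019.9c2f1e0d8b7a65443c2b1a0f9e8d7c6b.json  (folder-based gallery)
performers/Jane Doe.json
movies/Example Movie.json
```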

# Content of the json files

In the following, the values of the corresponding JSON files are shown. If a value is a number, it is written with decimal places (like `29.98` or `50.0`), but still as a string. The meaning of most of them should be obvious from the previous explanation or from the possible values Stash offers when editing; otherwise a short comment is added.

The JSON values are given as strings, if not stated otherwise. Every new line stands for a new value in the JSON. If the value is a list of objects, the values of that object are shown indented.

If a value is empty in any but the `mappings.json` file, it can be left out of the file entirely. In the `mappings.json` however, all values must be present, if there are no objects of a type (for example, no performers), the value is simply null.
If a value is empty in any file, it can be left out of the file entirely.
Many files have a `created_at` and an `updated_at`; both use the following format:
```
YYYY-MM-DDThh:mm:ssTZD
@@ -29,22 +42,6 @@ Example:
"created_at": "2019-05-03T21:36:58+01:00"
```

## `mappings.json`
```
performers
  name
  checksum
studios
  name
  checksum
galleries
  path
  checksum
scenes
  path
  checksum
```

## Performer
```
name
@@ -112,100 +109,110 @@ created_at
updated_at
```

## Gallery

No files of this kind are generated yet.
## Image
```
title
studio
rating (integer)
performers (list of strings, performer names)
tags (list of strings)
files (list of path strings)
galleries
  zip_files (list of path strings)
  folder_path
  title (for user-created gallery)
created_at
updated_at
```
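As a sketch of a complete entry, an image belonging to a zip-based gallery could be exported like this; every value here is invented for illustration:

```json
{
  "title": "Example image",
  "studio": "Example Studio",
  "rating": 4,
  "performers": ["Jane Doe"],
  "tags": ["example"],
  "files": ["/media/images/archive.zip/0001.jpg"],
  "galleries": [
    {
      "zip_files": ["/media/images/archive.zip"]
    }
  ],
  "created_at": "2019-05-03T21:36:58+01:00",
  "updated_at": "2019-05-03T21:36:58+01:00"
}
```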

## Gallery
```
title
studio
url
date
rating (integer)
details
performers (list of strings, performer names)
tags (list of strings)
zip_files (list of path strings)
folder_path
created_at
updated_at
```
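For example, a folder-based gallery might be exported as the following sketch (all values invented):

```json
{
  "title": "Holiday 2019",
  "studio": "Example Studio",
  "folder_path": "/media/galleries/holiday-2019",
  "performers": ["Jane Doe"],
  "tags": ["holiday"],
  "created_at": "2019-05-03T21:36:58+01:00",
  "updated_at": "2019-05-03T21:36:58+01:00"
}
```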

## Files

### Folder
```
zip_file (path to containing zip file)
mod_time
type (= folder)
path
created_at
updated_at
```
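A minimal folder entry could look like this (path and timestamps invented):

```json
{
  "mod_time": "2019-05-03T21:36:58+01:00",
  "type": "folder",
  "path": "/media/galleries/holiday-2019",
  "created_at": "2019-05-03T21:36:58+01:00",
  "updated_at": "2019-05-03T21:36:58+01:00"
}
```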

### Video file
```
zip_file (path to containing zip file)
mod_time
type (= video)
path
fingerprints
  type
  fingerprint
size
format
width
height
duration
video_codec
audio_codec
frame_rate
bitrate
interactive (bool)
interactive_speed (integer)
created_at
updated_at
```
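A video file entry might look like the following sketch; the fingerprint types shown (`oshash`, `md5`) and all values are illustrative only:

```json
{
  "mod_time": "2019-05-03T21:36:58+01:00",
  "type": "video",
  "path": "/media/holiday.mp4",
  "fingerprints": [
    { "type": "oshash", "fingerprint": "a1b2c3d4e5f60718" },
    { "type": "md5", "fingerprint": "0b89cc3f0a31c7a9d53a6f2837a4b1aa" }
  ],
  "size": 123456789,
  "format": "mp4",
  "width": 1920,
  "height": 1080,
  "duration": 1800.5,
  "video_codec": "h264",
  "audio_codec": "aac",
  "frame_rate": 29.97,
  "bitrate": 5500000,
  "interactive": false,
  "created_at": "2019-05-03T21:36:58+01:00",
  "updated_at": "2019-05-03T21:36:58+01:00"
}
```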
|
||||
|
||||
### Image file
|
||||
```
|
||||
zip_file (path to containing zip file)
|
||||
mod_time
|
||||
type (= image)
|
||||
path
|
||||
fingerprints
|
||||
type
|
||||
fingerprint
|
||||
size
|
||||
format
|
||||
width
|
||||
height
|
||||
created_at
|
||||
updated_at
|
||||
```
|
||||
|
||||
### Other files
|
||||
```
|
||||
zip_file (path to containing zip file)
|
||||
mod_time
|
||||
type (= file)
|
||||
path
|
||||
fingerprints
|
||||
type
|
||||
fingerprint
|
||||
size
|
||||
created_at
|
||||
updated_at
|
||||
```
|
||||
|
||||
# In JSON format
|
||||
|
||||
For those preferring the json-format, defined [here](https://json-schema.org/), the following format may be more interesting:
|
||||
|
||||
## mappings.json

```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "$id": "https://github.com/stashapp/stash/wiki/JSON-Specification/mappings.json",
  "title": "mappings",
  "description": "The base file for the metadata. It refers to all other files by name and provides the paths to their content.",
  "type": "object",
  "properties": {
    "performers": {
      "description": "Links to the performer files, along with their names",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "name": { "type": "string" },
          "checksum": { "type": "string" }
        },
        "required": ["name", "checksum"]
      },
      "minItems": 0,
      "uniqueItems": true
    },
    "studios": {
      "description": "Links to the studio files, along with their names",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "name": { "type": "string" },
          "checksum": { "type": "string" }
        },
        "required": ["name", "checksum"]
      },
      "minItems": 0,
      "uniqueItems": true
    },
    "galleries": {
      "description": "Links to the gallery files, along with the paths to their content",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "path": { "type": "string" },
          "checksum": { "type": "string" }
        },
        "required": ["path", "checksum"]
      },
      "minItems": 0,
      "uniqueItems": true
    },
    "scenes": {
      "description": "Links to the scene files, along with the paths to their content",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "path": { "type": "string" },
          "checksum": { "type": "string" }
        },
        "required": ["path", "checksum"]
      },
      "minItems": 0,
      "uniqueItems": true
    }
  },
  "required": ["performers", "studios", "galleries", "scenes"]
}
```
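
A minimal document that should validate against this schema; every name, path, and checksum below is a placeholder:

```json
{
  "performers": [
    { "name": "Example Performer", "checksum": "3e25960a79dbc69b674cd4ec67a72c62" }
  ],
  "studios": [
    { "name": "Example Studio", "checksum": "8b1a9953c4611296a827abf8c47804d7" }
  ],
  "galleries": [
    { "path": "galleries/example.json", "checksum": "5d41402abc4b2a76b9719d911017c592" }
  ],
  "scenes": [
    { "path": "scenes/example.json", "checksum": "7d793037a0760186574b0282f2f435e7" }
  ]
}
```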

## performer.json

```json
@@ -439,45 +446,14 @@ For those preferring the json-format, defined [here](https://json-schema.org/),
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"file": {
|
||||
"description": "Some technical data about the scenes file.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"size": {
|
||||
"description": "The size of the file in bytes",
|
||||
"type": "string"
|
||||
},
|
||||
"duration": {
|
||||
"description": "Duration of the scene in seconds. It is given with after comma values, such as 10.0 or 17.5",
|
||||
"type": "string"
|
||||
},
|
||||
"video_codec": {
|
||||
"description": "The coding of the video part of the scene file. An example would be h264",
|
||||
"type": "string"
|
||||
},
|
||||
"audio_codec": {
|
||||
"description": "The coding of the audio part of the scene file. An example would be aac",
|
||||
"type": "string"
|
||||
},
|
||||
"width": {
|
||||
"description": "The width of the scene in pixels",
|
||||
"type": "integer"
|
||||
},
|
||||
"height": {
|
||||
"description": "The height of the scene in pixels",
|
||||
"type": "integer"
|
||||
},
|
||||
"framerate": {
|
||||
"description": "Framerate of the scene. It is given with after comma values, such as 29.95",
|
||||
"type": "string"
|
||||
},
|
||||
"bitrate": {
|
||||
"description": "The bitrate of the video, in bits",
|
||||
"type": "integer"
|
||||
}
|
||||
|
||||
"files": {
|
||||
"description": "A list of paths of the files for this scene",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"required": ["size", "duration", "video_codec", "audio_codec", "height", "width", "framerate", "bitrate"]
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"created_at": {
|
||||
"description": "The time this studios data was added to the database. Format is YYYY-MM-DDThh:mm:ssTZD",
|
||||
|
@ -491,7 +467,3 @@ For those preferring the json-format, defined [here](https://json-schema.org/),
|
|||
"required": ["files", "created_at", "updated_at"]
|
||||
}
|
||||
```
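
The practical effect of this hunk is that a scene document no longer embeds a `file` object of technical metadata; it carries a `files` list of paths instead, with the technical data living in the per-file entries described earlier. A sketch of the new fragment (paths are placeholders):

```json
{
  "files": [
    "/library/videos/example.mp4"
  ],
  "created_at": "2022-06-01T00:00:00Z",
  "updated_at": "2022-06-01T00:00:00Z"
}
```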

## Gallery

No files of this kind are created here yet.

@@ -9,10 +9,10 @@ Please report all issues to the following Github issue: https://github.com/stash

### **Warning:** If you are upgrading from an older `files-refactor` build, you will need to re-migrate your system from a schema version 31 database.

### 💥 Known issues

* Import/export functionality is currently disabled. It needs further design.
* Missing covers are not currently regenerated. This needs further consideration, especially around the scene cover redesign.

### Other changes:

* Import/export schema has changed and is incompatible with the previous version.
* Added support for filtering and sorting by file count. ([#2744](https://github.com/stashapp/stash/pull/2744))
* Changelog has been moved from the stats page to a section in the Settings page.
* Object titles are now displayed as the file basename if the title is not explicitly set. The `Don't include file extension as part of the title` scan flag is no longer supported.