package manager

import (
	"context"
	"fmt"
	"math"
	"runtime"
	"strconv"
	"sync"
	"time"

	"github.com/jmoiron/sqlx"

	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/manager/paths"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)
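
// ExportTask exports the contents of the database to JSON files in the
// export directory. Mappings collects the path/name-to-checksum mappings
// for every exported object, and Scraped collects the exported scraped
// items.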
type ExportTask struct {
	Mappings *jsonschema.Mappings
	Scraped  []jsonschema.ScrapedItem
}
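
// Start runs the export from start to finish: it ensures the JSON
// directories exist, exports scenes, galleries, performers, studios and
// movies, saves the mappings, then exports the scraped items. It is meant
// to be launched in its own goroutine; wg.Done is called on completion.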
func (t *ExportTask) Start(wg *sync.WaitGroup) {
	defer wg.Done()

	// TODO (from the Ruby port): total = Scene.count + Gallery.count + Performer.count + Studio.count + Movie.count
	workerCount := runtime.GOMAXPROCS(0) // set worker count to the number of CPUs available

	t.Mappings = &jsonschema.Mappings{}
	t.Scraped = []jsonschema.ScrapedItem{}

	ctx := context.TODO()
	startTime := time.Now()

	paths.EnsureJSONDirs()

	t.ExportScenes(ctx, workerCount)
	t.ExportGalleries(ctx)
	t.ExportPerformers(ctx, workerCount)
	t.ExportStudios(ctx, workerCount)
	t.ExportMovies(ctx, workerCount)

	if err := instance.JSON.saveMappings(t.Mappings); err != nil {
		logger.Errorf("[mappings] failed to save json: %s", err.Error())
	}

	t.ExportScrapedItems(ctx)

	logger.Infof("Export complete in %s.", time.Since(startTime))
}
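
// ExportScenes writes one JSON file per scene using a pool of worker
// goroutines: scenes are pushed onto a buffered channel and consumed by
// `workers` instances of exportScene. Closing the channel signals the
// workers to exit, and the WaitGroup blocks until they have all finished.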
func (t *ExportTask) ExportScenes(ctx context.Context, workers int) {
	var scenesWg sync.WaitGroup

	qb := models.NewSceneQueryBuilder()

	scenes, err := qb.All()
	if err != nil {
		logger.Errorf("[scenes] failed to fetch all scenes: %s", err.Error())
	}

	jobCh := make(chan *models.Scene, workers*2) // make a buffered channel to feed workers

	logger.Info("[scenes] exporting")
	startTime := time.Now()

	for w := 0; w < workers; w++ { // create export Scene workers
		scenesWg.Add(1)
		go exportScene(&scenesWg, jobCh, t, nil) // no db data is changed so tx is set to nil
	}

	for i, scene := range scenes {
		index := i + 1

		if (i % 100) == 0 { // make progress easier to read
			logger.Progressf("[scenes] %d of %d", index, len(scenes))
		}

		t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.Checksum})
		jobCh <- scene // feed workers
	}

	close(jobCh) // close channel so that workers know no more jobs are available
	scenesWg.Wait()

	logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
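
// exportScene is the worker side of ExportScenes. For each scene received
// on jobChan it resolves the related studio, gallery, performers, movies,
// tags and markers, assembles a jsonschema.Scene, and writes it out only
// when it differs from the JSON already on disk.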
func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask, tx *sqlx.Tx) {
	defer wg.Done()

	sceneQB := models.NewSceneQueryBuilder()
	studioQB := models.NewStudioQueryBuilder()
	movieQB := models.NewMovieQueryBuilder()
	galleryQB := models.NewGalleryQueryBuilder()
	performerQB := models.NewPerformerQueryBuilder()
	tagQB := models.NewTagQueryBuilder()
	sceneMarkerQB := models.NewSceneMarkerQueryBuilder()
	joinQB := models.NewJoinsQueryBuilder()

	for scene := range jobChan {
		newSceneJSON := jsonschema.Scene{
			CreatedAt: models.JSONTime{Time: scene.CreatedAt.Timestamp},
			UpdatedAt: models.JSONTime{Time: scene.UpdatedAt.Timestamp},
		}

		var studioName string
		if scene.StudioID.Valid {
			studio, _ := studioQB.Find(int(scene.StudioID.Int64), tx)
			if studio != nil {
				studioName = studio.Name.String
			}
		}

		var galleryChecksum string
		gallery, _ := galleryQB.FindBySceneID(scene.ID, tx)
		if gallery != nil {
			galleryChecksum = gallery.Checksum
		}

		performers, _ := performerQB.FindNameBySceneID(scene.ID, tx)
		sceneMovies, _ := joinQB.GetSceneMovies(scene.ID, tx)
		tags, _ := tagQB.FindBySceneID(scene.ID, tx)
		sceneMarkers, _ := sceneMarkerQB.FindBySceneID(scene.ID, tx)

		if scene.Title.Valid {
			newSceneJSON.Title = scene.Title.String
		}
		if studioName != "" {
			newSceneJSON.Studio = studioName
		}
		if scene.URL.Valid {
			newSceneJSON.URL = scene.URL.String
		}
		if scene.Date.Valid {
			newSceneJSON.Date = utils.GetYMDFromDatabaseDate(scene.Date.String)
		}
		if scene.Rating.Valid {
			newSceneJSON.Rating = int(scene.Rating.Int64)
		}

		newSceneJSON.OCounter = scene.OCounter

		if scene.Details.Valid {
			newSceneJSON.Details = scene.Details.String
		}
		if galleryChecksum != "" {
			newSceneJSON.Gallery = galleryChecksum
		}

		newSceneJSON.Performers = t.getPerformerNames(performers)
		newSceneJSON.Tags = t.getTagNames(tags)

		for _, sceneMarker := range sceneMarkers {
			primaryTag, err := tagQB.Find(sceneMarker.PrimaryTagID, tx)
			if err != nil {
				logger.Errorf("[scenes] <%s> invalid primary tag for scene marker: %s", scene.Checksum, err.Error())
				continue
			}
			sceneMarkerTags, err := tagQB.FindBySceneMarkerID(sceneMarker.ID, tx)
			if err != nil {
				logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", scene.Checksum, err.Error())
				continue
			}
			if sceneMarker.Title == "" || sceneMarker.Seconds == 0 || primaryTag.Name == "" {
				logger.Errorf("[scenes] invalid scene marker: %v", sceneMarker)
			}

			sceneMarkerJSON := jsonschema.SceneMarker{
				Title:      sceneMarker.Title,
				Seconds:    t.getDecimalString(sceneMarker.Seconds),
				PrimaryTag: primaryTag.Name,
				Tags:       t.getTagNames(sceneMarkerTags),
				CreatedAt:  models.JSONTime{Time: sceneMarker.CreatedAt.Timestamp},
				UpdatedAt:  models.JSONTime{Time: sceneMarker.UpdatedAt.Timestamp},
			}

			newSceneJSON.Markers = append(newSceneJSON.Markers, sceneMarkerJSON)
		}

		for _, sceneMovie := range sceneMovies {
			movie, _ := movieQB.Find(sceneMovie.MovieID, tx)

			if movie != nil && movie.Name.Valid { // guard against a missing movie row
				sceneMovieJSON := jsonschema.SceneMovie{
					MovieName:  movie.Name.String,
					SceneIndex: int(sceneMovie.SceneIndex.Int64),
				}
				newSceneJSON.Movies = append(newSceneJSON.Movies, sceneMovieJSON)
			}
		}

		newSceneJSON.File = &jsonschema.SceneFile{}
		if scene.Size.Valid {
			newSceneJSON.File.Size = scene.Size.String
		}
		if scene.Duration.Valid {
			newSceneJSON.File.Duration = t.getDecimalString(scene.Duration.Float64)
		}
		if scene.VideoCodec.Valid {
			newSceneJSON.File.VideoCodec = scene.VideoCodec.String
		}
		if scene.AudioCodec.Valid {
			newSceneJSON.File.AudioCodec = scene.AudioCodec.String
		}
		if scene.Format.Valid {
			newSceneJSON.File.Format = scene.Format.String
		}
		if scene.Width.Valid {
			newSceneJSON.File.Width = int(scene.Width.Int64)
		}
		if scene.Height.Valid {
			newSceneJSON.File.Height = int(scene.Height.Int64)
		}
		if scene.Framerate.Valid {
			newSceneJSON.File.Framerate = t.getDecimalString(scene.Framerate.Float64)
		}
		if scene.Bitrate.Valid {
			newSceneJSON.File.Bitrate = int(scene.Bitrate.Int64)
		}

		cover, err := sceneQB.GetSceneCover(scene.ID, tx)
		if err != nil {
			logger.Errorf("[scenes] <%s> error getting scene cover: %s", scene.Checksum, err.Error())
			continue
		}

		if len(cover) > 0 {
			newSceneJSON.Cover = utils.GetBase64StringFromData(cover)
		}

		sceneJSON, err := instance.JSON.getScene(scene.Checksum)
		if err != nil {
			logger.Debugf("[scenes] error reading scene json: %s", err.Error())
		} else if jsonschema.CompareJSON(*sceneJSON, newSceneJSON) {
			continue
		}

		if err := instance.JSON.saveScene(scene.Checksum, &newSceneJSON); err != nil {
			logger.Errorf("[scenes] <%s> failed to save json: %s", scene.Checksum, err.Error())
		}
	}
}
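
// ExportGalleries only records a path-to-checksum mapping per gallery;
// no per-gallery JSON file is written, so no worker pool is needed here.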
func (t *ExportTask) ExportGalleries(ctx context.Context) {
	qb := models.NewGalleryQueryBuilder()
	galleries, err := qb.All()
	if err != nil {
		logger.Errorf("[galleries] failed to fetch all galleries: %s", err.Error())
	}

	logger.Info("[galleries] exporting")

	for i, gallery := range galleries {
		index := i + 1
		logger.Progressf("[galleries] %d of %d", index, len(galleries))

		t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{Path: gallery.Path, Checksum: gallery.Checksum})
	}

	logger.Infof("[galleries] export complete")
}
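
// ExportPerformers mirrors ExportScenes: it feeds every performer through
// a buffered channel to a pool of exportPerformer workers and records a
// name-to-checksum mapping for each one.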
func (t *ExportTask) ExportPerformers(ctx context.Context, workers int) {
	var performersWg sync.WaitGroup

	qb := models.NewPerformerQueryBuilder()
	performers, err := qb.All()
	if err != nil {
		logger.Errorf("[performers] failed to fetch all performers: %s", err.Error())
	}

	jobCh := make(chan *models.Performer, workers*2) // make a buffered channel to feed workers

	logger.Info("[performers] exporting")
	startTime := time.Now()

	for w := 0; w < workers; w++ { // create export Performer workers
		performersWg.Add(1)
		go exportPerformer(&performersWg, jobCh)
	}

	for i, performer := range performers {
		index := i + 1
		logger.Progressf("[performers] %d of %d", index, len(performers))

		t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{Name: performer.Name.String, Checksum: performer.Checksum})
		jobCh <- performer // feed workers
	}

	close(jobCh) // close channel so workers know that no more jobs are available
	performersWg.Wait()

	logger.Infof("[performers] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
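
// exportPerformer builds a jsonschema.Performer from each performer
// received on jobChan, attaching the image as a base64 string, and skips
// the write when the generated JSON matches the existing file.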
func exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.Performer) {
	defer wg.Done()

	performerQB := models.NewPerformerQueryBuilder()

	for performer := range jobChan {
		newPerformerJSON := jsonschema.Performer{
			CreatedAt: models.JSONTime{Time: performer.CreatedAt.Timestamp},
			UpdatedAt: models.JSONTime{Time: performer.UpdatedAt.Timestamp},
		}

		if performer.Name.Valid {
			newPerformerJSON.Name = performer.Name.String
		}
		if performer.Gender.Valid {
			newPerformerJSON.Gender = performer.Gender.String
		}
		if performer.URL.Valid {
			newPerformerJSON.URL = performer.URL.String
		}
		if performer.Birthdate.Valid {
			newPerformerJSON.Birthdate = utils.GetYMDFromDatabaseDate(performer.Birthdate.String)
		}
		if performer.Ethnicity.Valid {
			newPerformerJSON.Ethnicity = performer.Ethnicity.String
		}
		if performer.Country.Valid {
			newPerformerJSON.Country = performer.Country.String
		}
		if performer.EyeColor.Valid {
			newPerformerJSON.EyeColor = performer.EyeColor.String
		}
		if performer.Height.Valid {
			newPerformerJSON.Height = performer.Height.String
		}
		if performer.Measurements.Valid {
			newPerformerJSON.Measurements = performer.Measurements.String
		}
		if performer.FakeTits.Valid {
			newPerformerJSON.FakeTits = performer.FakeTits.String
		}
		if performer.CareerLength.Valid {
			newPerformerJSON.CareerLength = performer.CareerLength.String
		}
		if performer.Tattoos.Valid {
			newPerformerJSON.Tattoos = performer.Tattoos.String
		}
		if performer.Piercings.Valid {
			newPerformerJSON.Piercings = performer.Piercings.String
		}
		if performer.Aliases.Valid {
			newPerformerJSON.Aliases = performer.Aliases.String
		}
		if performer.Twitter.Valid {
			newPerformerJSON.Twitter = performer.Twitter.String
		}
		if performer.Instagram.Valid {
			newPerformerJSON.Instagram = performer.Instagram.String
		}
		if performer.Favorite.Valid {
			newPerformerJSON.Favorite = performer.Favorite.Bool
		}

		image, err := performerQB.GetPerformerImage(performer.ID, nil)
		if err != nil {
			logger.Errorf("[performers] <%s> error getting performer image: %s", performer.Checksum, err.Error())
			continue
		}

		if len(image) > 0 {
			newPerformerJSON.Image = utils.GetBase64StringFromData(image)
		}

		performerJSON, err := instance.JSON.getPerformer(performer.Checksum)
		if err != nil {
			logger.Debugf("[performers] error reading performer json: %s", err.Error())
		} else if jsonschema.CompareJSON(*performerJSON, newPerformerJSON) {
			continue
		}

		if err := instance.JSON.savePerformer(performer.Checksum, &newPerformerJSON); err != nil {
			logger.Errorf("[performers] <%s> failed to save json: %s", performer.Checksum, err.Error())
		}
	}
}
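
// ExportStudios feeds every studio to a pool of exportStudio workers,
// recording a name-to-checksum mapping for each one.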
func (t *ExportTask) ExportStudios(ctx context.Context, workers int) {
	var studiosWg sync.WaitGroup

	qb := models.NewStudioQueryBuilder()
	studios, err := qb.All()
	if err != nil {
		logger.Errorf("[studios] failed to fetch all studios: %s", err.Error())
	}

	logger.Info("[studios] exporting")
	startTime := time.Now()

	jobCh := make(chan *models.Studio, workers*2) // make a buffered channel to feed workers

	for w := 0; w < workers; w++ { // create export Studio workers
		studiosWg.Add(1)
		go exportStudio(&studiosWg, jobCh)
	}

	for i, studio := range studios {
		index := i + 1
		logger.Progressf("[studios] %d of %d", index, len(studios))

		t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{Name: studio.Name.String, Checksum: studio.Checksum})
		jobCh <- studio // feed workers
	}

	close(jobCh)
	studiosWg.Wait()

	logger.Infof("[studios] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
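
// exportStudio converts each studio received on jobChan into a
// jsonschema.Studio, resolving the parent studio name and attaching the
// studio image as a base64 string.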
func exportStudio(wg *sync.WaitGroup, jobChan <-chan *models.Studio) {
	defer wg.Done()

	studioQB := models.NewStudioQueryBuilder()

	for studio := range jobChan {
		newStudioJSON := jsonschema.Studio{
			CreatedAt: models.JSONTime{Time: studio.CreatedAt.Timestamp},
			UpdatedAt: models.JSONTime{Time: studio.UpdatedAt.Timestamp},
		}

		if studio.Name.Valid {
			newStudioJSON.Name = studio.Name.String
		}
		if studio.URL.Valid {
			newStudioJSON.URL = studio.URL.String
		}
		if studio.ParentID.Valid {
			parent, _ := studioQB.Find(int(studio.ParentID.Int64), nil)
			if parent != nil {
				newStudioJSON.ParentStudio = parent.Name.String
			}
		}

		image, err := studioQB.GetStudioImage(studio.ID, nil)
		if err != nil {
			logger.Errorf("[studios] <%s> error getting studio image: %s", studio.Checksum, err.Error())
			continue
		}

		if len(image) > 0 {
			newStudioJSON.Image = utils.GetBase64StringFromData(image)
		}

		studioJSON, err := instance.JSON.getStudio(studio.Checksum)
		if err != nil {
			logger.Debugf("[studios] error reading studio json: %s", err.Error())
		} else if jsonschema.CompareJSON(*studioJSON, newStudioJSON) {
			continue
		}

		if err := instance.JSON.saveStudio(studio.Checksum, &newStudioJSON); err != nil {
			logger.Errorf("[studios] <%s> failed to save json: %s", studio.Checksum, err.Error())
		}
	}
}
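
// ExportMovies feeds every movie to a pool of exportMovie workers,
// recording a name-to-checksum mapping for each one.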
func (t *ExportTask) ExportMovies(ctx context.Context, workers int) {
	var moviesWg sync.WaitGroup

	qb := models.NewMovieQueryBuilder()
	movies, err := qb.All()
	if err != nil {
		logger.Errorf("[movies] failed to fetch all movies: %s", err.Error())
	}

	logger.Info("[movies] exporting")
	startTime := time.Now()

	jobCh := make(chan *models.Movie, workers*2) // make a buffered channel to feed workers

	for w := 0; w < workers; w++ { // create export Movie workers
		moviesWg.Add(1)
		go exportMovie(&moviesWg, jobCh)
	}

	for i, movie := range movies {
		index := i + 1
		logger.Progressf("[movies] %d of %d", index, len(movies))

		t.Mappings.Movies = append(t.Mappings.Movies, jsonschema.NameMapping{Name: movie.Name.String, Checksum: movie.Checksum})
		jobCh <- movie // feed workers
	}

	close(jobCh)
	moviesWg.Wait()

	logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
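
// exportMovie converts each movie received on jobChan into a
// jsonschema.Movie, resolving the studio name and attaching the front and
// back images as base64 strings.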
func exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movie) {
	defer wg.Done()

	movieQB := models.NewMovieQueryBuilder()
	studioQB := models.NewStudioQueryBuilder()

	for movie := range jobChan {
		newMovieJSON := jsonschema.Movie{
			CreatedAt: models.JSONTime{Time: movie.CreatedAt.Timestamp},
			UpdatedAt: models.JSONTime{Time: movie.UpdatedAt.Timestamp},
		}

		if movie.Name.Valid {
			newMovieJSON.Name = movie.Name.String
		}
		if movie.Aliases.Valid {
			newMovieJSON.Aliases = movie.Aliases.String
		}
		if movie.Date.Valid {
			newMovieJSON.Date = utils.GetYMDFromDatabaseDate(movie.Date.String)
		}
		if movie.Rating.Valid {
			newMovieJSON.Rating = int(movie.Rating.Int64)
		}
		if movie.Duration.Valid {
			newMovieJSON.Duration = int(movie.Duration.Int64)
		}
		if movie.Director.Valid {
			newMovieJSON.Director = movie.Director.String
		}
		if movie.Synopsis.Valid {
			newMovieJSON.Synopsis = movie.Synopsis.String
		}
		if movie.URL.Valid {
			newMovieJSON.URL = movie.URL.String
		}

		if movie.StudioID.Valid {
			studio, _ := studioQB.Find(int(movie.StudioID.Int64), nil)
			if studio != nil {
				newMovieJSON.Studio = studio.Name.String
			}
		}

		frontImage, err := movieQB.GetFrontImage(movie.ID, nil)
		if err != nil {
			logger.Errorf("[movies] <%s> error getting movie front image: %s", movie.Checksum, err.Error())
			continue
		}

		if len(frontImage) > 0 {
			newMovieJSON.FrontImage = utils.GetBase64StringFromData(frontImage)
		}

		backImage, err := movieQB.GetBackImage(movie.ID, nil)
		if err != nil {
			logger.Errorf("[movies] <%s> error getting movie back image: %s", movie.Checksum, err.Error())
			continue
		}

		if len(backImage) > 0 {
			newMovieJSON.BackImage = utils.GetBase64StringFromData(backImage)
		}

		movieJSON, err := instance.JSON.getMovie(movie.Checksum)
		if err != nil {
			logger.Debugf("[movies] error reading movie json: %s", err.Error())
		} else if jsonschema.CompareJSON(*movieJSON, newMovieJSON) {
			continue
		}

		if err := instance.JSON.saveMovie(movie.Checksum, &newMovieJSON); err != nil {
			logger.Errorf("[movies] <%s> failed to save json: %s", movie.Checksum, err.Error())
		}
	}
}
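
// ExportScrapedItems writes all scraped items to a single JSON document.
// Unlike the other exports it runs serially inside one read-only
// transaction, and only rewrites the file when the generated content
// differs from what is already on disk.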
func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	defer tx.Commit() // the transaction is only used for reads

	qb := models.NewScrapedItemQueryBuilder()
	sqb := models.NewStudioQueryBuilder()
	scrapedItems, err := qb.All()
	if err != nil {
		logger.Errorf("[scraped sites] failed to fetch all items: %s", err.Error())
	}

	logger.Info("[scraped sites] exporting")

	for i, scrapedItem := range scrapedItems {
		index := i + 1
		logger.Progressf("[scraped sites] %d of %d", index, len(scrapedItems))

		var studioName string
		if scrapedItem.StudioID.Valid {
			studio, _ := sqb.Find(int(scrapedItem.StudioID.Int64), tx)
			if studio != nil {
				studioName = studio.Name.String
			}
		}

		newScrapedItemJSON := jsonschema.ScrapedItem{}

		if scrapedItem.Title.Valid {
			newScrapedItemJSON.Title = scrapedItem.Title.String
		}
		if scrapedItem.Description.Valid {
			newScrapedItemJSON.Description = scrapedItem.Description.String
		}
		if scrapedItem.URL.Valid {
			newScrapedItemJSON.URL = scrapedItem.URL.String
		}
		if scrapedItem.Date.Valid {
			newScrapedItemJSON.Date = utils.GetYMDFromDatabaseDate(scrapedItem.Date.String)
		}
		if scrapedItem.Rating.Valid {
			newScrapedItemJSON.Rating = scrapedItem.Rating.String
		}
		if scrapedItem.Tags.Valid {
			newScrapedItemJSON.Tags = scrapedItem.Tags.String
		}
		if scrapedItem.Models.Valid {
			newScrapedItemJSON.Models = scrapedItem.Models.String
		}
		if scrapedItem.Episode.Valid {
			newScrapedItemJSON.Episode = int(scrapedItem.Episode.Int64)
		}
		if scrapedItem.GalleryFilename.Valid {
			newScrapedItemJSON.GalleryFilename = scrapedItem.GalleryFilename.String
		}
		if scrapedItem.GalleryURL.Valid {
			newScrapedItemJSON.GalleryURL = scrapedItem.GalleryURL.String
		}
		if scrapedItem.VideoFilename.Valid {
			newScrapedItemJSON.VideoFilename = scrapedItem.VideoFilename.String
		}
		if scrapedItem.VideoURL.Valid {
			newScrapedItemJSON.VideoURL = scrapedItem.VideoURL.String
		}

		newScrapedItemJSON.Studio = studioName
		updatedAt := models.JSONTime{Time: scrapedItem.UpdatedAt.Timestamp} // TODO keeping ruby format
		newScrapedItemJSON.UpdatedAt = updatedAt

		t.Scraped = append(t.Scraped, newScrapedItemJSON)
	}

	scrapedJSON, err := instance.JSON.getScraped()
	if err != nil {
		logger.Debugf("[scraped sites] error reading json: %s", err.Error())
	}
	if !jsonschema.CompareJSON(scrapedJSON, t.Scraped) {
		if err := instance.JSON.saveScaped(t.Scraped); err != nil {
			logger.Errorf("[scraped sites] failed to save json: %s", err.Error())
		}
	}

	logger.Infof("[scraped sites] export complete")
}
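
// getPerformerNames returns the valid names of the given performers, or
// nil when the slice is empty.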
func (t *ExportTask) getPerformerNames(performers []*models.Performer) []string {
	if len(performers) == 0 {
		return nil
	}

	var results []string
	for _, performer := range performers {
		if performer.Name.Valid {
			results = append(results, performer.Name.String)
		}
	}

	return results
}
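
// getTagNames returns the non-empty names of the given tags, or nil when
// the slice is empty.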
func (t *ExportTask) getTagNames(tags []*models.Tag) []string {
	if len(tags) == 0 {
		return nil
	}

	var results []string
	for _, tag := range tags {
		if tag.Name != "" {
			results = append(results, tag.Name)
		}
	}

	return results
}
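
// getDecimalString formats num with just enough decimal places to
// reproduce its value, always keeping at least one decimal place. A zero
// value formats as the empty string; for example, getDecimalString(29.97)
// returns "29.97" and getDecimalString(30) returns "30.0".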
func (t *ExportTask) getDecimalString(num float64) string {
	if num == 0 {
		return ""
	}

	precision := getPrecision(num)
	if precision == 0 {
		precision = 1
	}
	return fmt.Sprintf("%."+strconv.Itoa(precision)+"f", num)
}
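
// getPrecision counts the fractional digits of num by multiplying it by
// successive powers of ten until rounding at that scale reproduces the
// value exactly. For example, getPrecision(29.97) is 2 and
// getPrecision(30) is 0.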
func getPrecision(num float64) int {
	if num == 0 {
		return 0
	}

	e := 1.0
	p := 0
	for (math.Round(num*e) / e) != num {
		e *= 10
		p++
	}
	return p
}