2019-02-09 12:30:49 +00:00
|
|
|
package manager
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"database/sql"
|
2020-06-15 11:34:39 +00:00
|
|
|
"fmt"
|
2019-11-17 21:39:33 +00:00
|
|
|
"strconv"
|
|
|
|
"sync"
|
|
|
|
"time"
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
"github.com/jmoiron/sqlx"
|
2019-02-14 23:42:52 +00:00
|
|
|
"github.com/stashapp/stash/pkg/database"
|
|
|
|
"github.com/stashapp/stash/pkg/logger"
|
2019-03-23 14:56:59 +00:00
|
|
|
"github.com/stashapp/stash/pkg/manager/config"
|
2019-02-14 23:42:52 +00:00
|
|
|
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
|
|
|
"github.com/stashapp/stash/pkg/models"
|
|
|
|
"github.com/stashapp/stash/pkg/utils"
|
2019-02-09 12:30:49 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// ImportTask imports the database contents from the exported JSON metadata
// files. Running it is destructive: the existing database is reset before
// the import begins (see Start).
type ImportTask struct {
	// Mappings is the parsed mappings json, relating checksums to
	// names/paths for each object type. Loaded in Start.
	Mappings *jsonschema.Mappings
	// Scraped is the parsed scraped-items json. May be empty if the
	// scraped json file is missing.
	Scraped []jsonschema.ScrapedItem
}
|
|
|
|
|
|
|
|
func (t *ImportTask) Start(wg *sync.WaitGroup) {
|
2019-02-10 20:15:36 +00:00
|
|
|
defer wg.Done()
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
t.Mappings, _ = instance.JSON.getMappings()
|
|
|
|
if t.Mappings == nil {
|
2019-02-10 20:15:36 +00:00
|
|
|
logger.Error("missing mappings json")
|
|
|
|
return
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
scraped, _ := instance.JSON.getScraped()
|
|
|
|
if scraped == nil {
|
|
|
|
logger.Warn("missing scraped json")
|
|
|
|
}
|
|
|
|
t.Scraped = scraped
|
|
|
|
|
2019-11-17 21:39:33 +00:00
|
|
|
err := database.Reset(config.GetDatabasePath())
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("Error resetting database: %s", err.Error())
|
|
|
|
return
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
|
|
|
|
ctx := context.TODO()
|
|
|
|
|
|
|
|
t.ImportPerformers(ctx)
|
|
|
|
t.ImportStudios(ctx)
|
2020-03-10 03:28:15 +00:00
|
|
|
t.ImportMovies(ctx)
|
2019-02-09 12:30:49 +00:00
|
|
|
t.ImportGalleries(ctx)
|
|
|
|
t.ImportTags(ctx)
|
|
|
|
|
|
|
|
t.ImportScrapedItems(ctx)
|
|
|
|
t.ImportScenes(ctx)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (t *ImportTask) ImportPerformers(ctx context.Context) {
|
|
|
|
tx := database.DB.MustBeginTx(ctx, nil)
|
|
|
|
qb := models.NewPerformerQueryBuilder()
|
|
|
|
|
|
|
|
for i, mappingJSON := range t.Mappings.Performers {
|
|
|
|
index := i + 1
|
|
|
|
performerJSON, err := instance.JSON.getPerformer(mappingJSON.Checksum)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("[performers] failed to read json: %s", err.Error())
|
|
|
|
continue
|
|
|
|
}
|
2019-02-14 22:53:32 +00:00
|
|
|
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil {
|
|
|
|
return
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
|
|
|
|
logger.Progressf("[performers] %d of %d", index, len(t.Mappings.Performers))
|
|
|
|
|
2019-08-22 02:43:24 +00:00
|
|
|
// generate checksum from performer name rather than image
|
|
|
|
checksum := utils.MD5FromString(performerJSON.Name)
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
// Process the base 64 encoded image string
|
2019-08-22 02:43:24 +00:00
|
|
|
_, imageData, err := utils.ProcessBase64Image(performerJSON.Image)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[performers] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Populate a new performer from the input
|
|
|
|
newPerformer := models.Performer{
|
2019-02-14 22:53:32 +00:00
|
|
|
Checksum: checksum,
|
|
|
|
Favorite: sql.NullBool{Bool: performerJSON.Favorite, Valid: true},
|
2019-03-27 19:53:15 +00:00
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(performerJSON.CreatedAt)},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(performerJSON.UpdatedAt)},
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if performerJSON.Name != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
2020-03-31 22:36:38 +00:00
|
|
|
if performerJSON.Gender != "" {
|
|
|
|
newPerformer.Gender = sql.NullString{String: performerJSON.Gender, Valid: true}
|
|
|
|
}
|
2019-02-14 22:53:32 +00:00
|
|
|
if performerJSON.URL != "" {
|
|
|
|
newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Birthdate != "" {
|
2019-03-05 01:14:52 +00:00
|
|
|
newPerformer.Birthdate = models.SQLiteDate{String: performerJSON.Birthdate, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Ethnicity != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Ethnicity = sql.NullString{String: performerJSON.Ethnicity, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Country != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Country = sql.NullString{String: performerJSON.Country, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.EyeColor != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.EyeColor = sql.NullString{String: performerJSON.EyeColor, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Height != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Height = sql.NullString{String: performerJSON.Height, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Measurements != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Measurements = sql.NullString{String: performerJSON.Measurements, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.FakeTits != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.FakeTits = sql.NullString{String: performerJSON.FakeTits, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.CareerLength != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.CareerLength = sql.NullString{String: performerJSON.CareerLength, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Tattoos != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Tattoos = sql.NullString{String: performerJSON.Tattoos, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Piercings != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Piercings = sql.NullString{String: performerJSON.Piercings, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Aliases != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Aliases = sql.NullString{String: performerJSON.Aliases, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Twitter != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Twitter = sql.NullString{String: performerJSON.Twitter, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if performerJSON.Instagram != "" {
|
2019-02-14 22:53:32 +00:00
|
|
|
newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
2020-06-22 23:19:19 +00:00
|
|
|
createdPerformer, err := qb.Create(newPerformer, tx)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[performers] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
2020-06-22 23:19:19 +00:00
|
|
|
|
|
|
|
// Add the performer image if set
|
|
|
|
if len(imageData) > 0 {
|
|
|
|
if err := qb.UpdatePerformerImage(createdPerformer.ID, imageData, tx); err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[performers] <%s> error setting performer image: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
logger.Info("[performers] importing")
|
|
|
|
if err := tx.Commit(); err != nil {
|
|
|
|
logger.Errorf("[performers] import failed to commit: %s", err.Error())
|
|
|
|
}
|
|
|
|
logger.Info("[performers] import complete")
|
|
|
|
}
|
|
|
|
|
|
|
|
func (t *ImportTask) ImportStudios(ctx context.Context) {
|
|
|
|
tx := database.DB.MustBeginTx(ctx, nil)
|
2020-06-15 11:34:39 +00:00
|
|
|
|
|
|
|
pendingParent := make(map[string][]*jsonschema.Studio)
|
2019-02-09 12:30:49 +00:00
|
|
|
|
|
|
|
for i, mappingJSON := range t.Mappings.Studios {
|
|
|
|
index := i + 1
|
|
|
|
studioJSON, err := instance.JSON.getStudio(mappingJSON.Checksum)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("[studios] failed to read json: %s", err.Error())
|
|
|
|
continue
|
|
|
|
}
|
2019-02-14 22:53:32 +00:00
|
|
|
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil {
|
|
|
|
return
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
|
|
|
|
logger.Progressf("[studios] %d of %d", index, len(t.Mappings.Studios))
|
|
|
|
|
2020-06-15 11:34:39 +00:00
|
|
|
if err := t.ImportStudio(studioJSON, pendingParent, tx); err != nil {
|
|
|
|
tx.Rollback()
|
|
|
|
logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
return
|
|
|
|
}
|
2020-06-15 11:34:39 +00:00
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
|
2020-06-15 11:34:39 +00:00
|
|
|
// create the leftover studios, warning for missing parents
|
|
|
|
if len(pendingParent) > 0 {
|
|
|
|
logger.Warnf("[studios] importing studios with missing parents")
|
|
|
|
|
|
|
|
for _, s := range pendingParent {
|
|
|
|
for _, orphanStudioJSON := range s {
|
|
|
|
if err := t.ImportStudio(orphanStudioJSON, nil, tx); err != nil {
|
|
|
|
tx.Rollback()
|
|
|
|
logger.Errorf("[studios] <%s> failed to create: %s", orphanStudioJSON.Name, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
logger.Info("[studios] importing")
|
|
|
|
if err := tx.Commit(); err != nil {
|
|
|
|
logger.Errorf("[studios] import failed to commit: %s", err.Error())
|
|
|
|
}
|
|
|
|
logger.Info("[studios] import complete")
|
|
|
|
}
|
|
|
|
|
2020-06-15 11:34:39 +00:00
|
|
|
func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, tx *sqlx.Tx) error {
|
|
|
|
qb := models.NewStudioQueryBuilder()
|
|
|
|
|
|
|
|
// generate checksum from studio name rather than image
|
|
|
|
checksum := utils.MD5FromString(studioJSON.Name)
|
|
|
|
|
|
|
|
// Process the base 64 encoded image string
|
|
|
|
_, imageData, err := utils.ProcessBase64Image(studioJSON.Image)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("invalid image: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
// Populate a new studio from the input
|
|
|
|
newStudio := models.Studio{
|
|
|
|
Checksum: checksum,
|
|
|
|
Name: sql.NullString{String: studioJSON.Name, Valid: true},
|
|
|
|
URL: sql.NullString{String: studioJSON.URL, Valid: true},
|
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(studioJSON.CreatedAt)},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(studioJSON.UpdatedAt)},
|
|
|
|
}
|
|
|
|
|
|
|
|
// Populate the parent ID
|
|
|
|
if studioJSON.ParentStudio != "" {
|
|
|
|
studio, err := qb.FindByName(studioJSON.ParentStudio, tx, false)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("error finding studio by name <%s>: %s", studioJSON.ParentStudio, err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
if studio == nil {
|
|
|
|
// its possible that the parent hasn't been created yet
|
|
|
|
// do it after it is created
|
|
|
|
if pendingParent == nil {
|
|
|
|
logger.Warnf("[studios] studio <%s> does not exist", studioJSON.ParentStudio)
|
|
|
|
} else {
|
|
|
|
// add to the pending parent list so that it is created after the parent
|
|
|
|
s := pendingParent[studioJSON.ParentStudio]
|
|
|
|
s = append(s, studioJSON)
|
|
|
|
pendingParent[studioJSON.ParentStudio] = s
|
|
|
|
|
|
|
|
// skip
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
newStudio.ParentID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-22 23:19:19 +00:00
|
|
|
createdStudio, err := qb.Create(newStudio, tx)
|
2020-06-15 11:34:39 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2020-06-22 23:19:19 +00:00
|
|
|
if len(imageData) > 0 {
|
|
|
|
if err := qb.UpdateStudioImage(createdStudio.ID, imageData, tx); err != nil {
|
|
|
|
return fmt.Errorf("error setting studio image: %s", err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-15 11:34:39 +00:00
|
|
|
// now create the studios pending this studios creation
|
|
|
|
s := pendingParent[studioJSON.Name]
|
|
|
|
for _, childStudioJSON := range s {
|
|
|
|
// map is nil since we're not checking parent studios at this point
|
|
|
|
if err := t.ImportStudio(childStudioJSON, nil, tx); err != nil {
|
|
|
|
return fmt.Errorf("failed to create child studio <%s>: %s", childStudioJSON.Name, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// delete the entry from the map so that we know its not left over
|
|
|
|
delete(pendingParent, studioJSON.Name)
|
|
|
|
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2020-03-10 03:28:15 +00:00
|
|
|
func (t *ImportTask) ImportMovies(ctx context.Context) {
|
|
|
|
tx := database.DB.MustBeginTx(ctx, nil)
|
|
|
|
qb := models.NewMovieQueryBuilder()
|
|
|
|
|
|
|
|
for i, mappingJSON := range t.Mappings.Movies {
|
|
|
|
index := i + 1
|
|
|
|
movieJSON, err := instance.JSON.getMovie(mappingJSON.Checksum)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("[movies] failed to read json: %s", err.Error())
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || movieJSON == nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
logger.Progressf("[movies] %d of %d", index, len(t.Mappings.Movies))
|
|
|
|
|
|
|
|
// generate checksum from movie name rather than image
|
|
|
|
checksum := utils.MD5FromString(movieJSON.Name)
|
|
|
|
|
|
|
|
// Process the base 64 encoded image string
|
|
|
|
_, frontimageData, err := utils.ProcessBase64Image(movieJSON.FrontImage)
|
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[movies] <%s> invalid front_image: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
_, backimageData, err := utils.ProcessBase64Image(movieJSON.BackImage)
|
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[movies] <%s> invalid back_image: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Populate a new movie from the input
|
|
|
|
newMovie := models.Movie{
|
2020-06-22 23:19:19 +00:00
|
|
|
Checksum: checksum,
|
|
|
|
Name: sql.NullString{String: movieJSON.Name, Valid: true},
|
|
|
|
Aliases: sql.NullString{String: movieJSON.Aliases, Valid: true},
|
|
|
|
Date: models.SQLiteDate{String: movieJSON.Date, Valid: true},
|
|
|
|
Director: sql.NullString{String: movieJSON.Director, Valid: true},
|
|
|
|
Synopsis: sql.NullString{String: movieJSON.Synopsis, Valid: true},
|
|
|
|
URL: sql.NullString{String: movieJSON.URL, Valid: true},
|
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.CreatedAt)},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.UpdatedAt)},
|
2020-03-10 03:28:15 +00:00
|
|
|
}
|
|
|
|
|
2020-04-22 01:22:14 +00:00
|
|
|
if movieJSON.Rating != 0 {
|
|
|
|
newMovie.Rating = sql.NullInt64{Int64: int64(movieJSON.Rating), Valid: true}
|
|
|
|
}
|
|
|
|
if movieJSON.Duration != 0 {
|
|
|
|
newMovie.Duration = sql.NullInt64{Int64: int64(movieJSON.Duration), Valid: true}
|
|
|
|
}
|
|
|
|
|
2020-06-22 23:19:19 +00:00
|
|
|
// Populate the studio ID
|
|
|
|
if movieJSON.Studio != "" {
|
|
|
|
sqb := models.NewStudioQueryBuilder()
|
|
|
|
studio, err := sqb.FindByName(movieJSON.Studio, tx, false)
|
|
|
|
if err != nil {
|
|
|
|
logger.Warnf("[movies] error getting studio <%s>: %s", movieJSON.Studio, err.Error())
|
|
|
|
} else if studio == nil {
|
|
|
|
logger.Warnf("[movies] studio <%s> does not exist", movieJSON.Studio)
|
|
|
|
} else {
|
|
|
|
newMovie.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
createdMovie, err := qb.Create(newMovie, tx)
|
2020-03-10 03:28:15 +00:00
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[movies] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
2020-06-22 23:19:19 +00:00
|
|
|
|
|
|
|
// Add the performer image if set
|
|
|
|
if len(frontimageData) > 0 {
|
|
|
|
if err := qb.UpdateMovieImages(createdMovie.ID, frontimageData, backimageData, tx); err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[movies] <%s> error setting movie images: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
2020-03-10 03:28:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
logger.Info("[movies] importing")
|
|
|
|
if err := tx.Commit(); err != nil {
|
|
|
|
logger.Errorf("[movies] import failed to commit: %s", err.Error())
|
|
|
|
}
|
|
|
|
logger.Info("[movies] import complete")
|
|
|
|
}
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
func (t *ImportTask) ImportGalleries(ctx context.Context) {
|
|
|
|
tx := database.DB.MustBeginTx(ctx, nil)
|
|
|
|
qb := models.NewGalleryQueryBuilder()
|
|
|
|
|
|
|
|
for i, mappingJSON := range t.Mappings.Galleries {
|
|
|
|
index := i + 1
|
2019-02-14 22:53:32 +00:00
|
|
|
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
|
|
|
|
return
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
|
|
|
|
logger.Progressf("[galleries] %d of %d", index, len(t.Mappings.Galleries))
|
|
|
|
|
|
|
|
// Populate a new gallery from the input
|
|
|
|
currentTime := time.Now()
|
|
|
|
newGallery := models.Gallery{
|
2019-02-14 22:53:32 +00:00
|
|
|
Checksum: mappingJSON.Checksum,
|
|
|
|
Path: mappingJSON.Path,
|
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
_, err := qb.Create(newGallery, tx)
|
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[galleries] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
logger.Info("[galleries] importing")
|
|
|
|
if err := tx.Commit(); err != nil {
|
|
|
|
logger.Errorf("[galleries] import failed to commit: %s", err.Error())
|
|
|
|
}
|
|
|
|
logger.Info("[galleries] import complete")
|
|
|
|
}
|
|
|
|
|
|
|
|
// ImportTags collects every tag name referenced by the scene json files
// (scene tags, marker primary tags and marker tags), de-duplicates them,
// and creates one Tag row per unique name within a single transaction.
// Tags are derived from scenes because the export has no standalone tag
// files. Aborts with a rollback on a broken scene mapping or a failed
// tag creation.
func (t *ImportTask) ImportTags(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewTagQueryBuilder()

	// all tag names seen across every scene, duplicates included
	var tagNames []string

	for i, mappingJSON := range t.Mappings.Scenes {
		index := i + 1
		if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
			// broken mapping entry - release the transaction and abort
			_ = tx.Rollback()
			logger.Warn("[tags] scene mapping without checksum or path: ", mappingJSON)
			return
		}

		logger.Progressf("[tags] %d of %d scenes", index, len(t.Mappings.Scenes))

		sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
		if err != nil {
			logger.Infof("[tags] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
		}
		// Return early if we are missing a json file.
		if sceneJSON == nil {
			continue
		}

		// Get the tags from the tags json if we have it
		if len(sceneJSON.Tags) > 0 {
			tagNames = append(tagNames, sceneJSON.Tags...)
		}

		// Get the tags from the markers if we have marker json
		if len(sceneJSON.Markers) == 0 {
			continue
		}
		for _, markerJSON := range sceneJSON.Markers {
			if markerJSON.PrimaryTag != "" {
				tagNames = append(tagNames, markerJSON.PrimaryTag)
			}
			if len(markerJSON.Tags) > 0 {
				tagNames = append(tagNames, markerJSON.Tags...)
			}
		}
	}

	// create one tag per unique name; any failure aborts the whole import
	uniqueTagNames := t.getUnique(tagNames)
	for _, tagName := range uniqueTagNames {
		currentTime := time.Now()
		newTag := models.Tag{
			Name:      tagName,
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}

		_, err := qb.Create(newTag, tx)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[tags] <%s> failed to create: %s", tagName, err.Error())
			return
		}
	}

	logger.Info("[tags] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[tags] import failed to commit: %s", err.Error())
	}
	logger.Info("[tags] import complete")
}
|
|
|
|
|
|
|
|
func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
|
|
|
tx := database.DB.MustBeginTx(ctx, nil)
|
|
|
|
qb := models.NewScrapedItemQueryBuilder()
|
|
|
|
sqb := models.NewStudioQueryBuilder()
|
|
|
|
currentTime := time.Now()
|
|
|
|
|
|
|
|
for i, mappingJSON := range t.Scraped {
|
|
|
|
index := i + 1
|
|
|
|
logger.Progressf("[scraped sites] %d of %d", index, len(t.Mappings.Scenes))
|
|
|
|
|
|
|
|
newScrapedItem := models.ScrapedItem{
|
2019-02-14 22:53:32 +00:00
|
|
|
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
|
|
|
|
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
|
|
|
|
URL: sql.NullString{String: mappingJSON.URL, Valid: true},
|
2019-03-05 01:14:52 +00:00
|
|
|
Date: models.SQLiteDate{String: mappingJSON.Date, Valid: true},
|
2019-02-14 22:53:32 +00:00
|
|
|
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
|
|
|
|
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
|
|
|
|
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
|
|
|
|
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
|
2019-02-09 12:30:49 +00:00
|
|
|
GalleryFilename: sql.NullString{String: mappingJSON.GalleryFilename, Valid: true},
|
2019-02-14 22:53:32 +00:00
|
|
|
GalleryURL: sql.NullString{String: mappingJSON.GalleryURL, Valid: true},
|
|
|
|
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
|
|
|
|
VideoURL: sql.NullString{String: mappingJSON.VideoURL, Valid: true},
|
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
2019-03-27 19:53:15 +00:00
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(mappingJSON.UpdatedAt)},
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
2020-05-24 06:19:22 +00:00
|
|
|
studio, err := sqb.FindByName(mappingJSON.Studio, tx, false)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
|
|
|
|
}
|
|
|
|
if studio != nil {
|
|
|
|
newScrapedItem.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
|
|
|
}
|
|
|
|
|
|
|
|
_, err = qb.Create(newScrapedItem, tx)
|
|
|
|
if err != nil {
|
2019-02-14 22:53:32 +00:00
|
|
|
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title.String, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
logger.Info("[scraped sites] importing")
|
|
|
|
if err := tx.Commit(); err != nil {
|
|
|
|
logger.Errorf("[scraped sites] import failed to commit: %s", err.Error())
|
|
|
|
}
|
|
|
|
logger.Info("[scraped sites] import complete")
|
|
|
|
}
|
|
|
|
|
|
|
|
func (t *ImportTask) ImportScenes(ctx context.Context) {
|
|
|
|
tx := database.DB.MustBeginTx(ctx, nil)
|
|
|
|
qb := models.NewSceneQueryBuilder()
|
|
|
|
jqb := models.NewJoinsQueryBuilder()
|
|
|
|
|
|
|
|
for i, mappingJSON := range t.Mappings.Scenes {
|
|
|
|
index := i + 1
|
|
|
|
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
|
2019-02-10 20:15:36 +00:00
|
|
|
_ = tx.Rollback()
|
2019-02-09 12:30:49 +00:00
|
|
|
logger.Warn("[scenes] scene mapping without checksum or path: ", mappingJSON)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes))
|
|
|
|
|
|
|
|
newScene := models.Scene{
|
2019-03-27 19:53:15 +00:00
|
|
|
Checksum: mappingJSON.Checksum,
|
|
|
|
Path: mappingJSON.Path,
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
|
|
|
|
if err != nil {
|
|
|
|
logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2019-12-31 22:38:49 +00:00
|
|
|
// Process the base 64 encoded cover image string
|
2020-06-22 23:19:19 +00:00
|
|
|
var coverImageData []byte
|
2019-12-31 22:38:49 +00:00
|
|
|
if sceneJSON.Cover != "" {
|
2020-06-22 23:19:19 +00:00
|
|
|
_, coverImageData, err = utils.ProcessBase64Image(sceneJSON.Cover)
|
2019-12-31 22:38:49 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Warnf("[scenes] <%s> invalid cover image: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
}
|
|
|
|
if len(coverImageData) > 0 {
|
|
|
|
if err = SetSceneScreenshot(mappingJSON.Checksum, coverImageData); err != nil {
|
|
|
|
logger.Warnf("[scenes] <%s> failed to create cover image: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
}
|
2020-06-22 23:19:19 +00:00
|
|
|
|
|
|
|
// write the cover image data after creating the scene
|
2019-12-31 22:38:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
// Populate scene fields
|
|
|
|
if sceneJSON != nil {
|
|
|
|
if sceneJSON.Title != "" {
|
|
|
|
newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.Details != "" {
|
|
|
|
newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true}
|
|
|
|
}
|
2019-02-14 22:53:32 +00:00
|
|
|
if sceneJSON.URL != "" {
|
|
|
|
newScene.URL = sql.NullString{String: sceneJSON.URL, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if sceneJSON.Date != "" {
|
2019-03-05 01:14:52 +00:00
|
|
|
newScene.Date = models.SQLiteDate{String: sceneJSON.Date, Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
if sceneJSON.Rating != 0 {
|
|
|
|
newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true}
|
|
|
|
}
|
2020-04-22 23:14:58 +00:00
|
|
|
|
|
|
|
newScene.OCounter = sceneJSON.OCounter
|
2019-03-27 19:53:15 +00:00
|
|
|
newScene.CreatedAt = models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(sceneJSON.CreatedAt)}
|
|
|
|
newScene.UpdatedAt = models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(sceneJSON.UpdatedAt)}
|
2019-02-09 12:30:49 +00:00
|
|
|
|
|
|
|
if sceneJSON.File != nil {
|
|
|
|
if sceneJSON.File.Size != "" {
|
|
|
|
newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.File.Duration != "" {
|
|
|
|
duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
|
|
|
|
newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.File.VideoCodec != "" {
|
|
|
|
newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.File.AudioCodec != "" {
|
|
|
|
newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true}
|
|
|
|
}
|
2020-04-09 22:38:34 +00:00
|
|
|
if sceneJSON.File.Format != "" {
|
|
|
|
newScene.Format = sql.NullString{String: sceneJSON.File.Format, Valid: true}
|
|
|
|
}
|
2019-02-09 12:30:49 +00:00
|
|
|
if sceneJSON.File.Width != 0 {
|
|
|
|
newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.File.Height != 0 {
|
|
|
|
newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.File.Framerate != "" {
|
|
|
|
framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
|
|
|
|
newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true}
|
|
|
|
}
|
|
|
|
if sceneJSON.File.Bitrate != 0 {
|
|
|
|
newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// TODO: Get FFMPEG data?
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Populate the studio ID
|
|
|
|
if sceneJSON.Studio != "" {
|
|
|
|
sqb := models.NewStudioQueryBuilder()
|
2020-05-24 06:19:22 +00:00
|
|
|
studio, err := sqb.FindByName(sceneJSON.Studio, tx, false)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
2020-06-22 23:19:19 +00:00
|
|
|
logger.Warnf("[scenes] error getting studio <%s>: %s", sceneJSON.Studio, err.Error())
|
|
|
|
} else if studio == nil {
|
|
|
|
logger.Warnf("[scenes] studio <%s> does not exist", sceneJSON.Studio)
|
2019-02-09 12:30:49 +00:00
|
|
|
} else {
|
2019-02-14 22:53:32 +00:00
|
|
|
newScene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Create the scene in the DB
|
|
|
|
scene, err := qb.Create(newScene, tx)
|
|
|
|
if err != nil {
|
|
|
|
_ = tx.Rollback()
|
2019-11-15 21:34:01 +00:00
|
|
|
logger.Errorf("[scenes] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if scene.ID == 0 {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[scenes] <%s> invalid id after scene creation", mappingJSON.Checksum)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-06-22 23:19:19 +00:00
|
|
|
// Add the scene cover if set
|
|
|
|
if len(coverImageData) > 0 {
|
|
|
|
if err := qb.UpdateSceneCover(scene.ID, coverImageData, tx); err != nil {
|
|
|
|
_ = tx.Rollback()
|
|
|
|
logger.Errorf("[scenes] <%s> error setting scene cover: %s", mappingJSON.Checksum, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
// Relate the scene to the gallery
|
|
|
|
if sceneJSON.Gallery != "" {
|
|
|
|
gqb := models.NewGalleryQueryBuilder()
|
|
|
|
gallery, err := gqb.FindByChecksum(sceneJSON.Gallery, tx)
|
|
|
|
if err != nil {
|
2019-02-14 22:53:32 +00:00
|
|
|
logger.Warnf("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
} else {
|
2019-02-14 22:53:32 +00:00
|
|
|
gallery.SceneID = sql.NullInt64{Int64: int64(scene.ID), Valid: true}
|
2019-02-09 12:30:49 +00:00
|
|
|
_, err := gqb.Update(*gallery, tx)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("[scenes] <%s> failed to update gallery: %s", scene.Checksum, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Relate the scene to the performers
|
|
|
|
if len(sceneJSON.Performers) > 0 {
|
|
|
|
performers, err := t.getPerformers(sceneJSON.Performers, tx)
|
|
|
|
if err != nil {
|
2019-02-14 22:53:32 +00:00
|
|
|
logger.Warnf("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
} else {
|
|
|
|
var performerJoins []models.PerformersScenes
|
|
|
|
for _, performer := range performers {
|
|
|
|
join := models.PerformersScenes{
|
|
|
|
PerformerID: performer.ID,
|
2019-02-14 22:53:32 +00:00
|
|
|
SceneID: scene.ID,
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
performerJoins = append(performerJoins, join)
|
|
|
|
}
|
|
|
|
if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil {
|
|
|
|
logger.Errorf("[scenes] <%s> failed to associate performers: %s", scene.Checksum, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-10 03:28:15 +00:00
|
|
|
// Relate the scene to the movies
|
|
|
|
if len(sceneJSON.Movies) > 0 {
|
|
|
|
moviesScenes, err := t.getMoviesScenes(sceneJSON.Movies, scene.ID, tx)
|
|
|
|
if err != nil {
|
|
|
|
logger.Warnf("[scenes] <%s> failed to fetch movies: %s", scene.Checksum, err.Error())
|
|
|
|
} else {
|
|
|
|
if err := jqb.CreateMoviesScenes(moviesScenes, tx); err != nil {
|
|
|
|
logger.Errorf("[scenes] <%s> failed to associate movies: %s", scene.Checksum, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
// Relate the scene to the tags
|
|
|
|
if len(sceneJSON.Tags) > 0 {
|
|
|
|
tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx)
|
|
|
|
if err != nil {
|
2019-02-14 22:53:32 +00:00
|
|
|
logger.Warnf("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
} else {
|
|
|
|
var tagJoins []models.ScenesTags
|
|
|
|
for _, tag := range tags {
|
|
|
|
join := models.ScenesTags{
|
|
|
|
SceneID: scene.ID,
|
2019-02-14 22:53:32 +00:00
|
|
|
TagID: tag.ID,
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
tagJoins = append(tagJoins, join)
|
|
|
|
}
|
|
|
|
if err := jqb.CreateScenesTags(tagJoins, tx); err != nil {
|
|
|
|
logger.Errorf("[scenes] <%s> failed to associate tags: %s", scene.Checksum, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Relate the scene to the scene markers
|
|
|
|
if len(sceneJSON.Markers) > 0 {
|
|
|
|
smqb := models.NewSceneMarkerQueryBuilder()
|
|
|
|
tqb := models.NewTagQueryBuilder()
|
|
|
|
for _, marker := range sceneJSON.Markers {
|
|
|
|
seconds, _ := strconv.ParseFloat(marker.Seconds, 64)
|
|
|
|
newSceneMarker := models.SceneMarker{
|
2019-02-14 22:53:32 +00:00
|
|
|
Title: marker.Title,
|
|
|
|
Seconds: seconds,
|
|
|
|
SceneID: sql.NullInt64{Int64: int64(scene.ID), Valid: true},
|
2019-03-27 19:53:15 +00:00
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(marker.CreatedAt)},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(marker.UpdatedAt)},
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
2020-05-24 06:19:22 +00:00
|
|
|
primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx, false)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", scene.Checksum, err.Error())
|
|
|
|
} else {
|
2019-04-20 17:32:01 +00:00
|
|
|
newSceneMarker.PrimaryTagID = primaryTag.ID
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Create the scene marker in the DB
|
|
|
|
sceneMarker, err := smqb.Create(newSceneMarker, tx)
|
|
|
|
if err != nil {
|
|
|
|
logger.Warnf("[scenes] <%s> failed to create scene marker: %s", scene.Checksum, err.Error())
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if sceneMarker.ID == 0 {
|
|
|
|
logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", scene.Checksum)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get the scene marker tags and create the joins
|
|
|
|
tags, err := t.getTags(scene.Checksum, marker.Tags, tx)
|
|
|
|
if err != nil {
|
2019-02-14 22:53:32 +00:00
|
|
|
logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err.Error())
|
2019-02-09 12:30:49 +00:00
|
|
|
} else {
|
|
|
|
var tagJoins []models.SceneMarkersTags
|
|
|
|
for _, tag := range tags {
|
|
|
|
join := models.SceneMarkersTags{
|
|
|
|
SceneMarkerID: sceneMarker.ID,
|
2019-02-14 22:53:32 +00:00
|
|
|
TagID: tag.ID,
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
tagJoins = append(tagJoins, join)
|
|
|
|
}
|
|
|
|
if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil {
|
|
|
|
logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", scene.Checksum, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
logger.Info("[scenes] importing")
|
|
|
|
if err := tx.Commit(); err != nil {
|
|
|
|
logger.Errorf("[scenes] import failed to commit: %s", err.Error())
|
|
|
|
}
|
|
|
|
logger.Info("[scenes] import complete")
|
|
|
|
}
|
|
|
|
|
2019-05-27 19:34:26 +00:00
|
|
|
func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]*models.Performer, error) {
|
2019-02-09 12:30:49 +00:00
|
|
|
pqb := models.NewPerformerQueryBuilder()
|
2020-05-24 06:19:22 +00:00
|
|
|
performers, err := pqb.FindByNames(names, tx, false)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var pluckedNames []string
|
|
|
|
for _, performer := range performers {
|
|
|
|
if !performer.Name.Valid {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
pluckedNames = append(pluckedNames, performer.Name.String)
|
|
|
|
}
|
|
|
|
|
|
|
|
missingPerformers := utils.StrFilter(names, func(name string) bool {
|
|
|
|
return !utils.StrInclude(pluckedNames, name)
|
|
|
|
})
|
|
|
|
|
|
|
|
for _, missingPerformer := range missingPerformers {
|
|
|
|
logger.Warnf("[scenes] performer %s does not exist", missingPerformer)
|
|
|
|
}
|
|
|
|
|
|
|
|
return performers, nil
|
|
|
|
}
|
|
|
|
|
2020-03-10 03:28:15 +00:00
|
|
|
func (t *ImportTask) getMoviesScenes(input []jsonschema.SceneMovie, sceneID int, tx *sqlx.Tx) ([]models.MoviesScenes, error) {
|
|
|
|
mqb := models.NewMovieQueryBuilder()
|
|
|
|
|
|
|
|
var movies []models.MoviesScenes
|
|
|
|
for _, inputMovie := range input {
|
2020-05-24 06:19:22 +00:00
|
|
|
movie, err := mqb.FindByName(inputMovie.MovieName, tx, false)
|
2020-03-10 03:28:15 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if movie == nil {
|
|
|
|
logger.Warnf("[scenes] movie %s does not exist", inputMovie.MovieName)
|
|
|
|
} else {
|
2020-04-22 01:22:14 +00:00
|
|
|
toAdd := models.MoviesScenes{
|
|
|
|
MovieID: movie.ID,
|
|
|
|
SceneID: sceneID,
|
|
|
|
}
|
|
|
|
|
|
|
|
if inputMovie.SceneIndex != 0 {
|
|
|
|
toAdd.SceneIndex = sql.NullInt64{
|
|
|
|
Int64: int64(inputMovie.SceneIndex),
|
|
|
|
Valid: true,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
movies = append(movies, toAdd)
|
2020-03-10 03:28:15 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return movies, nil
|
|
|
|
}
|
|
|
|
|
2019-05-27 19:34:26 +00:00
|
|
|
func (t *ImportTask) getTags(sceneChecksum string, names []string, tx *sqlx.Tx) ([]*models.Tag, error) {
|
2019-02-09 12:30:49 +00:00
|
|
|
tqb := models.NewTagQueryBuilder()
|
2020-05-24 06:19:22 +00:00
|
|
|
tags, err := tqb.FindByNames(names, tx, false)
|
2019-02-09 12:30:49 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var pluckedNames []string
|
|
|
|
for _, tag := range tags {
|
|
|
|
if tag.Name == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
pluckedNames = append(pluckedNames, tag.Name)
|
|
|
|
}
|
|
|
|
|
|
|
|
missingTags := utils.StrFilter(names, func(name string) bool {
|
|
|
|
return !utils.StrInclude(pluckedNames, name)
|
|
|
|
})
|
|
|
|
|
|
|
|
for _, missingTag := range missingTags {
|
|
|
|
logger.Warnf("[scenes] <%s> tag %s does not exist", sceneChecksum, missingTag)
|
|
|
|
}
|
|
|
|
|
|
|
|
return tags, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// https://www.reddit.com/r/golang/comments/5ia523/idiomatic_way_to_remove_duplicates_in_a_slice/db6qa2e
|
|
|
|
func (t *ImportTask) getUnique(s []string) []string {
|
|
|
|
seen := make(map[string]struct{}, len(s))
|
|
|
|
j := 0
|
|
|
|
for _, v := range s {
|
|
|
|
if _, ok := seen[v]; ok {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
seen[v] = struct{}{}
|
|
|
|
s[j] = v
|
|
|
|
j++
|
|
|
|
}
|
|
|
|
return s[:j]
|
2019-02-14 22:53:32 +00:00
|
|
|
}
|
2019-03-27 19:53:15 +00:00
|
|
|
|
|
|
|
// currentLocation caches the process's local time zone at package init,
// used to localize timestamps parsed from the JSON export.
var currentLocation = time.Now().Location()
|
|
|
|
|
|
|
|
func (t *ImportTask) getTimeFromJSONTime(jsonTime models.JSONTime) time.Time {
|
|
|
|
if currentLocation != nil {
|
|
|
|
if jsonTime.IsZero() {
|
|
|
|
return time.Now().In(currentLocation)
|
|
|
|
} else {
|
|
|
|
return jsonTime.Time.In(currentLocation)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if jsonTime.IsZero() {
|
|
|
|
return time.Now()
|
|
|
|
} else {
|
|
|
|
return jsonTime.Time
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|