package manager

import (
    "archive/zip"
    "context"
    "database/sql"
    "errors"
    "fmt"
    "os"
    "path/filepath"
    "strconv"
    "strings"
    "time"

    "github.com/remeh/sizedwaitgroup"

    "github.com/stashapp/stash/pkg/ffmpeg"
    "github.com/stashapp/stash/pkg/gallery"
    "github.com/stashapp/stash/pkg/image"
    "github.com/stashapp/stash/pkg/job"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager/config"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/plugin"
    "github.com/stashapp/stash/pkg/scene"
    "github.com/stashapp/stash/pkg/utils"
)
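
// ScanJob walks the configured stash paths, scanning video, image and
// gallery (zip) files into the database and optionally generating previews,
// sprites, phashes and thumbnails along the way.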
type ScanJob struct {
    txnManager    models.TransactionManager
    input         models.ScanMetadataInput
    subscriptions *subscriptionManager
}
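
// Execute runs the scan job: it first counts the files to be scanned, then
// walks each configured path with the configured number of parallel tasks,
// and finally associates scanned galleries with scenes that share the same
// basename.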
func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
    input := j.input
    paths := getScanPaths(input.Paths)

    var total *int
    var newFiles *int
    progress.ExecuteTask("Counting files to scan...", func() {
        total, newFiles = j.neededScan(ctx, paths)
    })

    if job.IsCancelled(ctx) {
        logger.Info("Stopping due to user request")
        return
    }

    if total == nil || newFiles == nil {
        logger.Infof("Taking too long to count content. Skipping...")
        logger.Infof("Starting scan")
    } else {
        logger.Infof("Starting scan of %d files. %d New files found", *total, *newFiles)
    }

    start := time.Now()
    config := config.GetInstance()
    parallelTasks := config.GetParallelTasksWithAutoDetection()
    logger.Infof("Scan started with %d parallel tasks", parallelTasks)
    wg := sizedwaitgroup.New(parallelTasks)

    if total != nil {
        progress.SetTotal(*total)
    }

    fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
    calculateMD5 := config.IsCalculateMD5()

    stoppingErr := errors.New("stopping")
    var err error

    var galleries []string

    for _, sp := range paths {
        csFs, er := utils.IsFsPathCaseSensitive(sp.Path)
        if er != nil {
            logger.Warnf("Cannot determine fs case sensitivity: %s", er.Error())
        }

        err = walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error {
            if job.IsCancelled(ctx) {
                return stoppingErr
            }

            // #1756 - skip zero length files and directories
            if info.IsDir() {
                return nil
            }

            if info.Size() == 0 {
                logger.Infof("Skipping zero-length file: %s", path)
                return nil
            }

            if isGallery(path) {
                galleries = append(galleries, path)
            }

            if err := instance.Paths.Generated.EnsureTmpDir(); err != nil {
                logger.Warnf("couldn't create temporary directory: %v", err)
            }

            wg.Add()
            task := ScanTask{
                TxnManager:           j.txnManager,
                FilePath:             path,
                UseFileMetadata:      utils.IsTrue(input.UseFileMetadata),
                StripFileExtension:   utils.IsTrue(input.StripFileExtension),
                fileNamingAlgorithm:  fileNamingAlgo,
                calculateMD5:         calculateMD5,
                GeneratePreview:      utils.IsTrue(input.ScanGeneratePreviews),
                GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews),
                GenerateSprite:       utils.IsTrue(input.ScanGenerateSprites),
                GeneratePhash:        utils.IsTrue(input.ScanGeneratePhashes),
                GenerateThumbnails:   utils.IsTrue(input.ScanGenerateThumbnails),
                progress:             progress,
                CaseSensitiveFs:      csFs,
                ctx:                  ctx,
            }

            go func() {
                task.Start(ctx)
                wg.Done()
                progress.Increment()
            }()

            return nil
        })

        if errors.Is(err, stoppingErr) {
            logger.Info("Stopping due to user request")
            break
        }

        if err != nil {
            logger.Errorf("Error encountered scanning files: %s", err.Error())
            break
        }
    }

    wg.Wait()

    if err := instance.Paths.Generated.EmptyTmpDir(); err != nil {
        logger.Warnf("couldn't empty temporary directory: %v", err)
    }

    elapsed := time.Since(start)
    logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))

    if job.IsCancelled(ctx) || err != nil {
        return
    }

    progress.ExecuteTask("Associating galleries", func() {
        for _, path := range galleries {
            wg.Add()
            task := ScanTask{
                TxnManager:      j.txnManager,
                FilePath:        path,
                UseFileMetadata: false,
            }

            go task.associateGallery(&wg)
            wg.Wait()
        }
        logger.Info("Finished gallery association")
    })

    j.subscriptions.notify()
}
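
// neededScan walks the given paths and counts the total number of files and
// the number of files that are not yet in the database. It returns nil
// counts if counting exceeds the timeout or the job is cancelled.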
func (j *ScanJob) neededScan(ctx context.Context, paths []*models.StashConfig) (total *int, newFiles *int) {
    const timeout = 90 * time.Second

    // create a control channel through which to signal the counting loop when the timeout is reached
    chTimeout := time.After(timeout)

    logger.Infof("Counting files to scan...")

    t := 0
    n := 0

    timeoutErr := errors.New("timed out")

    for _, sp := range paths {
        err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error {
            t++
            task := ScanTask{FilePath: path, TxnManager: j.txnManager}
            if !task.doesPathExist() {
                n++
            }

            // check for timeout
            select {
            case <-chTimeout:
                return timeoutErr
            default:
            }

            // check stop
            if job.IsCancelled(ctx) {
                return timeoutErr
            }

            return nil
        })

        if errors.Is(err, timeoutErr) {
            // timeout should return nil counts
            return nil, nil
        }

        if err != nil {
            logger.Errorf("Error encountered counting files to scan: %s", err.Error())
            return nil, nil
        }
    }

    return &t, &n
}
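
// ScanTask scans a single file path, creating or updating the corresponding
// scene, image or gallery record and optionally generating derived files
// such as sprites, previews, phashes and thumbnails.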
type ScanTask struct {
    ctx                  context.Context
    TxnManager           models.TransactionManager
    FilePath             string
    UseFileMetadata      bool
    StripFileExtension   bool
    calculateMD5         bool
    fileNamingAlgorithm  models.HashAlgorithm
    GenerateSprite       bool
    GeneratePhash        bool
    GeneratePreview      bool
    GenerateImagePreview bool
    GenerateThumbnails   bool
    zipGallery           *models.Gallery
    progress             *job.Progress
    CaseSensitiveFs      bool
}
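
// Start scans the file at t.FilePath, dispatching to the gallery, scene or
// image scanner based on the file type. For scenes it then generates any
// requested sprites, phashes and previews in parallel.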
func (t *ScanTask) Start(ctx context.Context) {
    var s *models.Scene

    t.progress.ExecuteTask("Scanning "+t.FilePath, func() {
        if isGallery(t.FilePath) {
            t.scanGallery(ctx)
        } else if isVideo(t.FilePath) {
            s = t.scanScene()
        } else if isImage(t.FilePath) {
            t.scanImage()
        }
    })

    if s != nil {
        iwg := sizedwaitgroup.New(2)

        if t.GenerateSprite {
            iwg.Add()

            go t.progress.ExecuteTask(fmt.Sprintf("Generating sprites for %s", t.FilePath), func() {
                taskSprite := GenerateSpriteTask{
                    Scene:               *s,
                    Overwrite:           false,
                    fileNamingAlgorithm: t.fileNamingAlgorithm,
                }
                taskSprite.Start()
                iwg.Done()
            })
        }

        if t.GeneratePhash {
            iwg.Add()

            go t.progress.ExecuteTask(fmt.Sprintf("Generating phash for %s", t.FilePath), func() {
                taskPhash := GeneratePhashTask{
                    Scene:               *s,
                    fileNamingAlgorithm: t.fileNamingAlgorithm,
                    txnManager:          t.TxnManager,
                }
                taskPhash.Start()
                iwg.Done()
            })
        }

        if t.GeneratePreview {
            iwg.Add()

            go t.progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", t.FilePath), func() {
                config := config.GetInstance()
                var previewSegmentDuration = config.GetPreviewSegmentDuration()
                var previewSegments = config.GetPreviewSegments()
                var previewExcludeStart = config.GetPreviewExcludeStart()
                var previewExcludeEnd = config.GetPreviewExcludeEnd()
                var previewPresent = config.GetPreviewPreset()

                // NOTE: the reuse of this model like this is painful.
                previewOptions := models.GeneratePreviewOptionsInput{
                    PreviewSegments:        &previewSegments,
                    PreviewSegmentDuration: &previewSegmentDuration,
                    PreviewExcludeStart:    &previewExcludeStart,
                    PreviewExcludeEnd:      &previewExcludeEnd,
                    PreviewPreset:          &previewPresent,
                }

                taskPreview := GeneratePreviewTask{
                    Scene:               *s,
                    ImagePreview:        t.GenerateImagePreview,
                    Options:             previewOptions,
                    Overwrite:           false,
                    fileNamingAlgorithm: t.fileNamingAlgorithm,
                }
                taskPreview.Start()
                iwg.Done()
            })
        }

        iwg.Wait()
    }
}
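
// scanGallery scans a zip gallery file: it creates the gallery if it is new,
// updates its path or checksum if the file has moved or changed, and scans
// or regenerates the images contained in the zip as required.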
func (t *ScanTask) scanGallery(ctx context.Context) {
    var g *models.Gallery
    images := 0
    scanImages := false

    if err := t.TxnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
        var err error
        g, err = r.Gallery().FindByPath(t.FilePath)

        if g != nil && err == nil {
            images, err = r.Image().CountByGalleryID(g.ID)
            if err != nil {
                return fmt.Errorf("error getting images for zip gallery %s: %s", t.FilePath, err.Error())
            }
        }

        return err
    }); err != nil {
        logger.Error(err.Error())
        return
    }

    fileModTime, err := t.getFileModTime()
    if err != nil {
        logger.Error(err.Error())
        return
    }

    if g != nil {
        // We already have this item in the database, keep going

        // if file mod time is not set, set it now
        if !g.FileModTime.Valid {
            // we will also need to rescan the zip contents
            scanImages = true
            logger.Infof("setting file modification time on %s", t.FilePath)

            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                qb := r.Gallery()
                if _, err := gallery.UpdateFileModTime(qb, g.ID, models.NullSQLiteTimestamp{
                    Timestamp: fileModTime,
                    Valid:     true,
                }); err != nil {
                    return err
                }

                // update our copy of the gallery
                var err error
                g, err = qb.Find(g.ID)
                return err
            }); err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // if the mod time of the zip file is different than that of the associated
        // gallery, then recalculate the checksum
        modified := t.isFileModified(fileModTime, g.FileModTime)
        if modified {
            scanImages = true
            logger.Infof("%s has been updated: rescanning", t.FilePath)

            // update the checksum and the modification time
            checksum, err := t.calculateChecksum()
            if err != nil {
                logger.Error(err.Error())
                return
            }

            currentTime := time.Now()
            galleryPartial := models.GalleryPartial{
                ID:       g.ID,
                Checksum: &checksum,
                FileModTime: &models.NullSQLiteTimestamp{
                    Timestamp: fileModTime,
                    Valid:     true,
                },
                UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
            }

            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                _, err := r.Gallery().UpdatePartial(galleryPartial)
                return err
            }); err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // scan the zip files if the gallery has no images
        scanImages = scanImages || images == 0
    } else {
        checksum, err := t.calculateChecksum()
        if err != nil {
            logger.Error(err.Error())
            return
        }

        isNewGallery := false
        if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
            qb := r.Gallery()
            g, _ = qb.FindByChecksum(checksum)
            if g != nil {
                exists, _ := utils.FileExists(g.Path.String)
                if !t.CaseSensitiveFs {
                    // #1426 - if file exists but is a case-insensitive match for the
                    // original filename, then treat it as a move
                    if exists && strings.EqualFold(t.FilePath, g.Path.String) {
                        exists = false
                    }
                }

                if exists {
                    logger.Infof("%s already exists. Duplicate of %s", t.FilePath, g.Path.String)
                } else {
                    logger.Infof("%s already exists. Updating path...", t.FilePath)
                    g.Path = sql.NullString{
                        String: t.FilePath,
                        Valid:  true,
                    }
                    g, err = qb.Update(*g)
                    if err != nil {
                        return err
                    }

                    GetInstance().PluginCache.ExecutePostHooks(t.ctx, g.ID, plugin.GalleryUpdatePost, nil, nil)
                }
            } else {
                currentTime := time.Now()

                newGallery := models.Gallery{
                    Checksum: checksum,
                    Zip:      true,
                    Path: sql.NullString{
                        String: t.FilePath,
                        Valid:  true,
                    },
                    FileModTime: models.NullSQLiteTimestamp{
                        Timestamp: fileModTime,
                        Valid:     true,
                    },
                    Title: sql.NullString{
                        String: utils.GetNameFromPath(t.FilePath, t.StripFileExtension),
                        Valid:  true,
                    },
                    CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
                    UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
                }

                // don't create gallery if it has no images
                if countImagesInZip(t.FilePath) > 0 {
                    // only warn when creating the gallery
                    ok, err := utils.IsZipFileUncompressed(t.FilePath)
                    if err == nil && !ok {
                        logger.Warnf("%s is using above store (0) level compression.", t.FilePath)
                    }

                    logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
                    g, err = qb.Create(newGallery)
                    if err != nil {
                        return err
                    }
                    scanImages = true

                    isNewGallery = true
                }
            }

            return nil
        }); err != nil {
            logger.Error(err.Error())
            return
        }

        if isNewGallery {
            GetInstance().PluginCache.ExecutePostHooks(t.ctx, g.ID, plugin.GalleryCreatePost, nil, nil)
        }
    }

    if g != nil {
        if scanImages {
            t.scanZipImages(g)
        } else {
            // in case thumbnails have been deleted, regenerate them
            t.regenerateZipImages(g)
        }
    }
}
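
// getFileModTime returns the file's modification time, truncated to seconds
// to match the precision stored in the database.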
func (t *ScanTask) getFileModTime() (time.Time, error) {
    fi, err := os.Stat(t.FilePath)
    if err != nil {
        return time.Time{}, fmt.Errorf("error performing stat on %s: %s", t.FilePath, err.Error())
    }

    ret := fi.ModTime()
    // truncate to seconds, since we don't store beyond that in the database
    ret = ret.Truncate(time.Second)

    return ret, nil
}
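
// getInteractive reports whether a funscript file exists alongside the video
// file, which marks the scene as interactive.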
func (t *ScanTask) getInteractive() bool {
    _, err := os.Stat(utils.GetFunscriptPath(t.FilePath))
    return err == nil
}
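
// isFileModified reports whether the file's modification time differs from
// the timestamp stored for the associated database record.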
func (t *ScanTask) isFileModified(fileModTime time.Time, modTime models.NullSQLiteTimestamp) bool {
    return !modTime.Timestamp.Equal(fileModTime)
}
// associateGallery associates a gallery to a scene with the same basename
func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
    if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
        qb := r.Gallery()
        sqb := r.Scene()
        g, err := qb.FindByPath(t.FilePath)
        if err != nil {
            return err
        }

        if g == nil {
            // associate is run after scan is finished
            // should only happen if gallery is a directory or an io error occurs during hashing
            logger.Warnf("associate: gallery %s not found in DB", t.FilePath)
            return nil
        }

        basename := strings.TrimSuffix(t.FilePath, filepath.Ext(t.FilePath))
        var relatedFiles []string
        vExt := config.GetInstance().GetVideoExtensions()
        // make a list of media files that can be related to the gallery
        for _, ext := range vExt {
            related := basename + "." + ext
            // exclude gallery extensions from the related files
            if !isGallery(related) {
                relatedFiles = append(relatedFiles, related)
            }
        }
        for _, scenePath := range relatedFiles {
            scene, _ := sqb.FindByPath(scenePath)
            // found related scene
            if scene != nil {
                // check if the gallery is already associated to the scene
                sceneGalleries, _ := sqb.FindByGalleryID(g.ID)
                isAssoc := false
                for _, sg := range sceneGalleries {
                    if scene.ID == sg.ID {
                        isAssoc = true
                        break
                    }
                }
                if !isAssoc {
                    logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID)
                    if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil {
                        return err
                    }
                }
            }
        }
        return nil
    }); err != nil {
        logger.Error(err.Error())
    }
    wg.Done()
}
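
// scanScene scans a video file. If the scene already exists it refreshes its
// metadata, hashes and generated files as needed and returns nil; if it is
// new it creates the scene and returns the created record so the caller can
// generate further artifacts for it.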
func (t *ScanTask) scanScene() *models.Scene {
    logError := func(err error) *models.Scene {
        logger.Error(err.Error())
        return nil
    }

    var retScene *models.Scene
    var s *models.Scene

    if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
        var err error
        s, err = r.Scene().FindByPath(t.FilePath)
        return err
    }); err != nil {
        logger.Error(err.Error())
        return nil
    }

    fileModTime, err := t.getFileModTime()
    if err != nil {
        return logError(err)
    }
    interactive := t.getInteractive()

    if s != nil {
        // if file mod time is not set, set it now
        if !s.FileModTime.Valid {
            logger.Infof("setting file modification time on %s", t.FilePath)

            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                qb := r.Scene()
                if _, err := scene.UpdateFileModTime(qb, s.ID, models.NullSQLiteTimestamp{
                    Timestamp: fileModTime,
                    Valid:     true,
                }); err != nil {
                    return err
                }

                // update our copy of the scene
                var err error
                s, err = qb.Find(s.ID)
                return err
            }); err != nil {
                return logError(err)
            }
        }

        // if the mod time of the file is different than that of the associated
        // scene, then recalculate the checksum and regenerate the thumbnail
        modified := t.isFileModified(fileModTime, s.FileModTime)
        config := config.GetInstance()
        if modified || !s.Size.Valid {
            oldHash := s.GetHash(config.GetVideoFileNamingAlgorithm())
            s, err = t.rescanScene(s, fileModTime)
            if err != nil {
                return logError(err)
            }

            // Migrate any generated files if the hash has changed
            newHash := s.GetHash(config.GetVideoFileNamingAlgorithm())
            if newHash != oldHash {
                MigrateHash(oldHash, newHash)
            }
        }

        // We already have this item in the database
        // check for thumbnails, screenshots
        t.makeScreenshots(nil, s.GetHash(t.fileNamingAlgorithm))

        // check for container
        if !s.Format.Valid {
            videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension)
            if err != nil {
                return logError(err)
            }
            container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)
            logger.Infof("Adding container %s to file %s", container, t.FilePath)

            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                _, err := scene.UpdateFormat(r.Scene(), s.ID, string(container))
                return err
            }); err != nil {
                return logError(err)
            }
        }

        // check if oshash is set
        if !s.OSHash.Valid {
            logger.Infof("Calculating oshash for existing file %s ...", t.FilePath)
            oshash, err := utils.OSHashFromFilePath(t.FilePath)
            if err != nil {
                return nil
            }

            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                qb := r.Scene()
                // check if oshash clashes with existing scene
                dupe, _ := qb.FindByOSHash(oshash)
                if dupe != nil {
                    return fmt.Errorf("OSHash for file %s is the same as that of %s", t.FilePath, dupe.Path)
                }

                _, err := scene.UpdateOSHash(qb, s.ID, oshash)
                return err
            }); err != nil {
                return logError(err)
            }
        }

        // check if MD5 is set, if calculateMD5 is true
        if t.calculateMD5 && !s.Checksum.Valid {
            checksum, err := t.calculateChecksum()
            if err != nil {
                return logError(err)
            }

            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                qb := r.Scene()
                // check if checksum clashes with existing scene
                dupe, _ := qb.FindByChecksum(checksum)
                if dupe != nil {
                    return fmt.Errorf("MD5 for file %s is the same as that of %s", t.FilePath, dupe.Path)
                }

                _, err := scene.UpdateChecksum(qb, s.ID, checksum)
                return err
            }); err != nil {
                return logError(err)
            }
        }

        if s.Interactive != interactive {
            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                qb := r.Scene()
                scenePartial := models.ScenePartial{
                    ID:          s.ID,
                    Interactive: &interactive,
                }
                _, err := qb.Update(scenePartial)
                return err
            }); err != nil {
                return logError(err)
            }
        }

        return nil
    }

    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension)
    if err != nil {
        logger.Error(err.Error())
        return nil
    }
    container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)

    // Override title to be filename if UseFileMetadata is false
    if !t.UseFileMetadata {
        videoFile.SetTitleFromPath(t.StripFileExtension)
    }

    var checksum string

    logger.Infof("%s not found. Calculating oshash...", t.FilePath)
    oshash, err := utils.OSHashFromFilePath(t.FilePath)
    if err != nil {
        return logError(err)
    }

    if t.fileNamingAlgorithm == models.HashAlgorithmMd5 || t.calculateMD5 {
        checksum, err = t.calculateChecksum()
        if err != nil {
            return logError(err)
        }
    }

    // check for scene by checksum and oshash - MD5 should be
    // redundant, but check both
    txnErr := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
        qb := r.Scene()
        if checksum != "" {
            s, _ = qb.FindByChecksum(checksum)
        }

        if s == nil {
            s, _ = qb.FindByOSHash(oshash)
        }

        return nil
    })
    if txnErr != nil {
        logger.Warnf("error in read transaction: %v", txnErr)
    }

    sceneHash := oshash

    if t.fileNamingAlgorithm == models.HashAlgorithmMd5 {
        sceneHash = checksum
    }

    t.makeScreenshots(videoFile, sceneHash)

    if s != nil {
        exists, _ := utils.FileExists(s.Path)
        if !t.CaseSensitiveFs {
            // #1426 - if file exists but is a case-insensitive match for the
            // original filename, then treat it as a move
            if exists && strings.EqualFold(t.FilePath, s.Path) {
                exists = false
            }
        }

        if exists {
            logger.Infof("%s already exists. Duplicate of %s", t.FilePath, s.Path)
        } else {
            logger.Infof("%s already exists. Updating path...", t.FilePath)
            scenePartial := models.ScenePartial{
                ID:          s.ID,
                Path:        &t.FilePath,
                Interactive: &interactive,
            }
            if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
                _, err := r.Scene().Update(scenePartial)
                return err
            }); err != nil {
                return logError(err)
            }

            GetInstance().PluginCache.ExecutePostHooks(t.ctx, s.ID, plugin.SceneUpdatePost, nil, nil)
        }
    } else {
        logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
        currentTime := time.Now()
        newScene := models.Scene{
            Checksum:   sql.NullString{String: checksum, Valid: checksum != ""},
            OSHash:     sql.NullString{String: oshash, Valid: oshash != ""},
            Path:       t.FilePath,
            Title:      sql.NullString{String: videoFile.Title, Valid: true},
            Duration:   sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
            VideoCodec: sql.NullString{String: videoFile.VideoCodec, Valid: true},
            AudioCodec: sql.NullString{String: videoFile.AudioCodec, Valid: true},
            Format:     sql.NullString{String: string(container), Valid: true},
            Width:      sql.NullInt64{Int64: int64(videoFile.Width), Valid: true},
            Height:     sql.NullInt64{Int64: int64(videoFile.Height), Valid: true},
            Framerate:  sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
            Bitrate:    sql.NullInt64{Int64: videoFile.Bitrate, Valid: true},
            Size:       sql.NullString{String: strconv.FormatInt(videoFile.Size, 10), Valid: true},
            FileModTime: models.NullSQLiteTimestamp{
                Timestamp: fileModTime,
                Valid:     true,
            },
            CreatedAt:   models.SQLiteTimestamp{Timestamp: currentTime},
            UpdatedAt:   models.SQLiteTimestamp{Timestamp: currentTime},
            Interactive: interactive,
        }

        if t.UseFileMetadata {
            newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true}
            newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")}
        }

        if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
            var err error
            retScene, err = r.Scene().Create(newScene)
            return err
        }); err != nil {
            return logError(err)
        }

        GetInstance().PluginCache.ExecutePostHooks(t.ctx, retScene.ID, plugin.SceneCreatePost, nil, nil)
    }

    return retScene
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
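// rescanScene re-processes an existing scene whose file has changed on disk.
// It recalculates the oshash (and MD5 when enabled), re-probes the file and
// updates the stored scene record, then fires the SceneUpdatePost plugin hook.
// Generated files are left untouched.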
func (t *ScanTask) rescanScene(s *models.Scene, fileModTime time.Time) (*models.Scene, error) {
|
2020-11-04 23:26:51 +00:00
|
|
|
logger.Infof("%s has been updated: rescanning", t.FilePath)
|
|
|
|
|
|
|
|
// update the oshash/checksum and the modification time
|
|
|
|
logger.Infof("Calculating oshash for existing file %s ...", t.FilePath)
|
|
|
|
oshash, err := utils.OSHashFromFilePath(t.FilePath)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var checksum *sql.NullString
|
|
|
|
if t.calculateMD5 {
|
|
|
|
cs, err := t.calculateChecksum()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
checksum = &sql.NullString{
|
|
|
|
String: cs,
|
|
|
|
Valid: true,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// regenerate the file details as well
|
2021-01-07 00:38:30 +00:00
|
|
|
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension)
|
2020-11-04 23:26:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)
|
|
|
|
|
|
|
|
currentTime := time.Now()
|
|
|
|
scenePartial := models.ScenePartial{
|
2021-01-18 01:23:20 +00:00
|
|
|
ID: s.ID,
|
2020-11-04 23:26:51 +00:00
|
|
|
Checksum: checksum,
|
|
|
|
OSHash: &sql.NullString{
|
|
|
|
String: oshash,
|
|
|
|
Valid: true,
|
|
|
|
},
|
|
|
|
Duration: &sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
|
|
|
|
VideoCodec: &sql.NullString{String: videoFile.VideoCodec, Valid: true},
|
|
|
|
AudioCodec: &sql.NullString{String: videoFile.AudioCodec, Valid: true},
|
|
|
|
Format: &sql.NullString{String: string(container), Valid: true},
|
|
|
|
Width: &sql.NullInt64{Int64: int64(videoFile.Width), Valid: true},
|
|
|
|
Height: &sql.NullInt64{Int64: int64(videoFile.Height), Valid: true},
|
|
|
|
Framerate: &sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
|
|
|
|
Bitrate: &sql.NullInt64{Int64: videoFile.Bitrate, Valid: true},
|
2020-12-21 23:29:53 +00:00
|
|
|
Size: &sql.NullString{String: strconv.FormatInt(videoFile.Size, 10), Valid: true},
|
2020-11-04 23:26:51 +00:00
|
|
|
FileModTime: &models.NullSQLiteTimestamp{
|
|
|
|
Timestamp: fileModTime,
|
|
|
|
Valid: true,
|
|
|
|
},
|
|
|
|
UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
|
|
|
|
}
|
|
|
|
|
|
|
|
var ret *models.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
|
|
|
var err error
|
|
|
|
ret, err = r.Scene().Update(scenePartial)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
2020-11-04 23:26:51 +00:00
|
|
|
logger.Error(err.Error())
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2021-06-11 07:24:58 +00:00
|
|
|
GetInstance().PluginCache.ExecutePostHooks(t.ctx, ret.ID, plugin.SceneUpdatePost, nil, nil)
|
|
|
|
|
2020-11-04 23:26:51 +00:00
|
|
|
// leave the generated files as is - the scene file may have been moved
|
|
|
|
// elsewhere
|
|
|
|
|
|
|
|
return ret, nil
|
|
|
|
}
|
2019-10-17 13:50:30 +00:00
|
|
|
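// makeScreenshots writes the thumbnail and full-size screenshot for a scene
// when either is missing. If no probe result is supplied, the file is probed
// first. Screenshots are taken at 20% of the video duration.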
func (t *ScanTask) makeScreenshots(probeResult *ffmpeg.VideoFile, checksum string) {
|
2019-02-09 12:30:49 +00:00
|
|
|
thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum)
|
|
|
|
normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)
|
|
|
|
|
|
|
|
thumbExists, _ := utils.FileExists(thumbPath)
|
|
|
|
normalExists, _ := utils.FileExists(normalPath)
|
2019-10-17 13:50:30 +00:00
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
if thumbExists && normalExists {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-10-17 23:17:51 +00:00
|
|
|
if probeResult == nil {
|
2019-10-18 17:35:53 +00:00
|
|
|
var err error
|
2021-01-07 00:38:30 +00:00
|
|
|
probeResult, err = ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension)
|
2019-10-17 13:50:30 +00:00
|
|
|
|
2019-10-17 23:17:51 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
2019-10-17 13:50:30 +00:00
|
|
|
}
|
2019-10-18 17:35:53 +00:00
|
|
|
logger.Infof("Regenerating images for %s", t.FilePath)
|
2019-10-17 13:50:30 +00:00
|
|
|
}
|
|
|
|
|
2020-03-11 21:34:04 +00:00
|
|
|
at := float64(probeResult.Duration) * 0.2
|
|
|
|
|
2019-10-17 23:17:51 +00:00
|
|
|
if !thumbExists {
|
|
|
|
logger.Debugf("Creating thumbnail for %s", t.FilePath)
|
2020-03-11 21:34:04 +00:00
|
|
|
makeScreenshot(*probeResult, thumbPath, 5, 320, at)
|
2019-10-17 23:17:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if !normalExists {
|
|
|
|
logger.Debugf("Creating screenshot for %s", t.FilePath)
|
2020-03-11 21:34:04 +00:00
|
|
|
makeScreenshot(*probeResult, normalPath, 2, probeResult.Width, at)
|
2019-02-09 12:30:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
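// scanZipImages walks the contents of a zip-based gallery and runs a copy of
// this scan task for every file inside the archive.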
func (t *ScanTask) scanZipImages(zipGallery *models.Gallery) {
|
|
|
|
err := walkGalleryZip(zipGallery.Path.String, func(file *zip.File) error {
|
|
|
|
// copy this task and change the filename
|
|
|
|
subTask := *t
|
|
|
|
|
|
|
|
// filepath is the zip file and the internal file name, separated by a null byte
|
|
|
|
subTask.FilePath = image.ZipFilename(zipGallery.Path.String, file.Name)
|
|
|
|
subTask.zipGallery = zipGallery
|
|
|
|
|
|
|
|
// run the subtask and wait for it to complete
|
2021-10-14 04:32:41 +00:00
|
|
|
subTask.Start(context.TODO())
|
2020-10-12 23:12:46 +00:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
logger.Warnf("failed to scan zip file images for %s: %s", zipGallery.Path.String, err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
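// regenerateZipImages regenerates thumbnails for every image already
// associated with the given zip gallery.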
func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) {
|
2021-01-18 01:23:20 +00:00
|
|
|
var images []*models.Image
|
|
|
|
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
|
|
|
iqb := r.Image()
|
2020-10-12 23:12:46 +00:00
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
var err error
|
|
|
|
images, err = iqb.FindByGalleryID(zipGallery.ID)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
2020-10-12 23:12:46 +00:00
|
|
|
logger.Warnf("failed to find gallery images: %s", err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, img := range images {
|
|
|
|
t.generateThumbnail(img)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
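// scanImage handles a single image path. Existing images are checked for
// moves and modification-time changes; unknown files are checksummed and
// created, then associated with a zip or folder gallery where applicable.
// A thumbnail is generated at the end if required.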
func (t *ScanTask) scanImage() {
|
2021-01-18 01:23:20 +00:00
|
|
|
var i *models.Image
|
|
|
|
|
|
|
|
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
|
|
|
var err error
|
|
|
|
i, err = r.Image().FindByPath(t.FilePath)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
2020-11-04 23:26:51 +00:00
|
|
|
|
|
|
|
fileModTime, err := image.GetFileModTime(t.FilePath)
|
|
|
|
if err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
if i != nil {
|
2020-11-04 23:26:51 +00:00
|
|
|
// if file mod time is not set, set it now
|
|
|
|
if !i.FileModTime.Valid {
|
2021-01-18 01:23:20 +00:00
|
|
|
logger.Infof("setting file modification time on %s", t.FilePath)
|
2020-11-04 23:26:51 +00:00
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
|
|
|
qb := r.Image()
|
|
|
|
if _, err := image.UpdateFileModTime(qb, i.ID, models.NullSQLiteTimestamp{
|
|
|
|
Timestamp: fileModTime,
|
|
|
|
Valid: true,
|
|
|
|
}); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// update our copy of the image
|
|
|
|
var err error
|
|
|
|
i, err = qb.Find(i.ID)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
2020-11-04 23:26:51 +00:00
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// if the mod time of the file is different from that of the associated
|
|
|
|
// image, then recalculate the checksum and regenerate the thumbnail
|
|
|
|
modified := t.isFileModified(fileModTime, i.FileModTime)
|
|
|
|
if modified {
|
|
|
|
i, err = t.rescanImage(i, fileModTime)
|
|
|
|
if err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
// We already have this item in the database
|
|
|
|
// check for thumbnails
|
|
|
|
t.generateThumbnail(i)
|
2021-01-18 01:23:20 +00:00
|
|
|
} else {
|
|
|
|
var checksum string
|
2020-10-12 23:12:46 +00:00
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
logger.Infof("%s not found. Calculating checksum...", t.FilePath)
|
|
|
|
checksum, err = t.calculateImageChecksum()
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("error calculating checksum for %s: %s", t.FilePath, err.Error())
|
|
|
|
return
|
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
// check for an existing image with the same checksum
|
|
|
|
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
|
|
|
var err error
|
|
|
|
i, err = r.Image().FindByChecksum(checksum)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
if i != nil {
|
|
|
|
exists := image.FileExists(i.Path)
|
2021-06-11 05:25:09 +00:00
|
|
|
if !t.CaseSensitiveFs {
|
|
|
|
// #1426 - if file exists but is a case-insensitive match for the
|
|
|
|
// original filename, then treat it as a move
|
|
|
|
if exists && strings.EqualFold(t.FilePath, i.Path) {
|
|
|
|
exists = false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
if exists {
|
|
|
|
logger.Infof("%s already exists. Duplicate of %s ", image.PathDisplayName(t.FilePath), image.PathDisplayName(i.Path))
|
|
|
|
} else {
|
|
|
|
logger.Infof("%s already exists. Updating path...", image.PathDisplayName(t.FilePath))
|
|
|
|
imagePartial := models.ImagePartial{
|
|
|
|
ID: i.ID,
|
|
|
|
Path: &t.FilePath,
|
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
|
|
|
_, err := r.Image().Update(imagePartial)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
2021-06-11 07:24:58 +00:00
|
|
|
|
|
|
|
GetInstance().PluginCache.ExecutePostHooks(t.ctx, i.ID, plugin.ImageUpdatePost, nil, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
} else {
|
2021-01-18 01:23:20 +00:00
|
|
|
logger.Infof("%s doesn't exist. Creating new item...", image.PathDisplayName(t.FilePath))
|
|
|
|
currentTime := time.Now()
|
|
|
|
newImage := models.Image{
|
|
|
|
Checksum: checksum,
|
|
|
|
Path: t.FilePath,
|
|
|
|
FileModTime: models.NullSQLiteTimestamp{
|
|
|
|
Timestamp: fileModTime,
|
|
|
|
Valid: true,
|
|
|
|
},
|
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
|
|
|
}
|
2021-05-03 04:21:51 +00:00
|
|
|
newImage.Title.String = image.GetFilename(&newImage, t.StripFileExtension)
|
|
|
|
newImage.Title.Valid = true
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := image.SetFileDetails(&newImage); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
|
|
|
var err error
|
|
|
|
i, err = r.Image().Create(newImage)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
2021-06-11 07:24:58 +00:00
|
|
|
|
|
|
|
GetInstance().PluginCache.ExecutePostHooks(t.ctx, i.ID, plugin.ImageCreatePost, nil, nil)
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if t.zipGallery != nil {
|
|
|
|
// associate with gallery
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
|
|
|
return gallery.AddImage(r.Gallery(), t.zipGallery.ID, i.ID)
|
|
|
|
}); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
2021-04-11 23:31:33 +00:00
|
|
|
} else if config.GetInstance().GetCreateGalleriesFromFolders() {
|
2020-10-12 23:12:46 +00:00
|
|
|
// create gallery from folder or associate with existing gallery
|
|
|
|
logger.Infof("Associating image %s with folder gallery", i.Path)
|
2021-09-19 00:28:34 +00:00
|
|
|
var galleryID int
|
|
|
|
var isNewGallery bool
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
2021-09-19 00:28:34 +00:00
|
|
|
var err error
|
|
|
|
galleryID, isNewGallery, err = t.associateImageWithFolderGallery(i.ID, r.Gallery())
|
|
|
|
return err
|
2021-01-18 01:23:20 +00:00
|
|
|
}); err != nil {
|
|
|
|
logger.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
2021-09-19 00:28:34 +00:00
|
|
|
|
|
|
|
if isNewGallery {
|
|
|
|
GetInstance().PluginCache.ExecutePostHooks(t.ctx, galleryID, plugin.GalleryCreatePost, nil, nil)
|
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
if i != nil {
|
|
|
|
t.generateThumbnail(i)
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-11-04 23:26:51 +00:00
|
|
|
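// rescanImage updates an existing image whose file has changed on disk. It
// recalculates the checksum and file details, updates the record, removes the
// stale thumbnail when the checksum changed, and fires the ImageUpdatePost
// plugin hook.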
func (t *ScanTask) rescanImage(i *models.Image, fileModTime time.Time) (*models.Image, error) {
|
|
|
|
logger.Infof("%s has been updated: rescanning", t.FilePath)
|
|
|
|
|
|
|
|
oldChecksum := i.Checksum
|
|
|
|
|
|
|
|
// update the checksum and the modification time
|
|
|
|
checksum, err := t.calculateImageChecksum()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// regenerate the file details as well
|
|
|
|
fileDetails, err := image.GetFileDetails(t.FilePath)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
currentTime := time.Now()
|
|
|
|
imagePartial := models.ImagePartial{
|
|
|
|
ID: i.ID,
|
|
|
|
Checksum: &checksum,
|
|
|
|
Width: &fileDetails.Width,
|
|
|
|
Height: &fileDetails.Height,
|
|
|
|
Size: &fileDetails.Size,
|
|
|
|
FileModTime: &models.NullSQLiteTimestamp{
|
|
|
|
Timestamp: fileModTime,
|
|
|
|
Valid: true,
|
|
|
|
},
|
|
|
|
UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
|
|
|
|
}
|
|
|
|
|
|
|
|
var ret *models.Image
|
2021-01-18 01:23:20 +00:00
|
|
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
|
|
|
var err error
|
|
|
|
ret, err = r.Image().Update(imagePartial)
|
|
|
|
return err
|
|
|
|
}); err != nil {
|
2020-11-04 23:26:51 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// remove the old thumbnail if the checksum changed - we'll regenerate it
|
|
|
|
if oldChecksum != checksum {
|
|
|
|
err = os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(oldChecksum, models.DefaultGthumbWidth)) // remove the stale thumbnail for the old checksum
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("Error deleting thumbnail image: %s", err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-06-11 07:24:58 +00:00
|
|
|
GetInstance().PluginCache.ExecutePostHooks(t.ctx, ret.ID, plugin.ImageUpdatePost, nil, nil)
|
|
|
|
|
2020-11-04 23:26:51 +00:00
|
|
|
return ret, nil
|
|
|
|
}
|
|
|
|
|
2021-09-19 00:28:34 +00:00
|
|
|
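// associateImageWithFolderGallery finds or creates a folder-based gallery for
// the image's parent directory and adds the image to it. isNew reports
// whether a new gallery was created.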
func (t *ScanTask) associateImageWithFolderGallery(imageID int, qb models.GalleryReaderWriter) (galleryID int, isNew bool, err error) {
|
2020-10-12 23:12:46 +00:00
|
|
|
// find a gallery with the path specified
|
|
|
|
path := filepath.Dir(t.FilePath)
|
2021-09-19 00:28:34 +00:00
|
|
|
var g *models.Gallery
|
|
|
|
g, err = qb.FindByPath(path)
|
2020-10-12 23:12:46 +00:00
|
|
|
if err != nil {
|
2021-09-19 00:28:34 +00:00
|
|
|
return
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if g == nil {
|
|
|
|
checksum := utils.MD5FromString(path)
|
|
|
|
|
|
|
|
// create the gallery
|
|
|
|
currentTime := time.Now()
|
|
|
|
|
|
|
|
newGallery := models.Gallery{
|
|
|
|
Checksum: checksum,
|
|
|
|
Path: sql.NullString{
|
|
|
|
String: path,
|
|
|
|
Valid: true,
|
|
|
|
},
|
|
|
|
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
|
|
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
2021-05-03 04:21:51 +00:00
|
|
|
Title: sql.NullString{
|
|
|
|
String: utils.GetNameFromPath(path, false),
|
|
|
|
Valid: true,
|
|
|
|
},
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
logger.Infof("Creating gallery for folder %s", path)
|
2021-01-18 01:23:20 +00:00
|
|
|
g, err = qb.Create(newGallery)
|
2020-10-12 23:12:46 +00:00
|
|
|
if err != nil {
|
2021-09-19 00:28:34 +00:00
|
|
|
return 0, false, err
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
2021-09-19 00:28:34 +00:00
|
|
|
|
|
|
|
isNew = true
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// associate image with gallery
|
2021-01-18 01:23:20 +00:00
|
|
|
err = gallery.AddImage(qb, g.ID, imageID)
|
2021-09-19 00:28:34 +00:00
|
|
|
galleryID = g.ID
|
|
|
|
return
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
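// generateThumbnail writes a thumbnail for the image when generation is
// enabled, the thumbnail does not already exist, and the source is larger
// than the default thumbnail width in either dimension.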
func (t *ScanTask) generateThumbnail(i *models.Image) {
|
2021-09-23 05:22:14 +00:00
|
|
|
if !t.GenerateThumbnails {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
|
|
|
|
exists, _ := utils.FileExists(thumbPath)
|
|
|
|
if exists {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-09-23 05:22:14 +00:00
|
|
|
config, _, err := image.DecodeSourceImage(i)
|
2020-10-12 23:12:46 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("error reading image %s: %s", i.Path, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-09-23 05:22:14 +00:00
|
|
|
if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth {
|
|
|
|
encoder := image.NewThumbnailEncoder(instance.FFMPEGPath)
|
|
|
|
data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth)
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
err = utils.WriteFile(thumbPath, data)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-09 12:30:49 +00:00
|
|
|
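// calculateChecksum returns the MD5 checksum of the task's file path.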
func (t *ScanTask) calculateChecksum() (string, error) {
|
2020-08-06 01:21:14 +00:00
|
|
|
logger.Infof("Calculating checksum for %s...", t.FilePath)
|
2019-02-09 12:30:49 +00:00
|
|
|
checksum, err := utils.MD5FromFilePath(t.FilePath)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
logger.Debugf("Checksum calculated: %s", checksum)
|
|
|
|
return checksum, nil
|
|
|
|
}
|
2019-11-15 17:23:58 +00:00
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
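// calculateImageChecksum returns the MD5 checksum of the task's image,
// reading from inside a zip archive when necessary.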
func (t *ScanTask) calculateImageChecksum() (string, error) {
|
|
|
|
logger.Infof("Calculating checksum for %s...", image.PathDisplayName(t.FilePath))
|
|
|
|
// uses image.CalculateMD5 to read files in zips
|
|
|
|
checksum, err := image.CalculateMD5(t.FilePath)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
logger.Debugf("Checksum calculated: %s", checksum)
|
|
|
|
return checksum, nil
|
|
|
|
}
|
|
|
|
|
2019-11-15 17:23:58 +00:00
|
|
|
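// doesPathExist reports whether the task's path is already present in the
// database as a gallery, scene or image, chosen by file extension.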
func (t *ScanTask) doesPathExist() bool {
|
2021-04-11 23:31:33 +00:00
|
|
|
config := config.GetInstance()
|
2020-10-12 23:12:46 +00:00
|
|
|
vidExt := config.GetVideoExtensions()
|
|
|
|
imgExt := config.GetImageExtensions()
|
|
|
|
gExt := config.GetGalleryExtensions()
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
ret := false
|
2021-09-20 23:34:25 +00:00
|
|
|
txnErr := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
2021-01-18 01:23:20 +00:00
|
|
|
if matchExtension(t.FilePath, gExt) {
|
|
|
|
gallery, _ := r.Gallery().FindByPath(t.FilePath)
|
|
|
|
if gallery != nil {
|
|
|
|
ret = true
|
|
|
|
}
|
|
|
|
} else if matchExtension(t.FilePath, vidExt) {
|
|
|
|
s, _ := r.Scene().FindByPath(t.FilePath)
|
|
|
|
if s != nil {
|
|
|
|
ret = true
|
|
|
|
}
|
|
|
|
} else if matchExtension(t.FilePath, imgExt) {
|
|
|
|
i, _ := r.Image().FindByPath(t.FilePath)
|
|
|
|
if i != nil {
|
|
|
|
ret = true
|
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
return nil
|
|
|
|
})
|
2021-09-20 23:34:25 +00:00
|
|
|
if txnErr != nil {
|
|
|
|
logger.Warnf("error while executing read transaction: %v", txnErr)
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
return ret
|
2019-11-15 17:23:58 +00:00
|
|
|
}
|
2020-10-12 23:12:46 +00:00
|
|
|
|
|
|
|
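// walkFilesToScan walks the stash path with utils.SymWalk, skipping the
// generated directory and any excluded patterns, and invokes f for each
// video, image or gallery file that should be scanned.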
func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error {
|
2021-04-11 23:31:33 +00:00
|
|
|
config := config.GetInstance()
|
2020-10-12 23:12:46 +00:00
|
|
|
vidExt := config.GetVideoExtensions()
|
|
|
|
imgExt := config.GetImageExtensions()
|
|
|
|
gExt := config.GetGalleryExtensions()
|
2020-10-26 04:57:58 +00:00
|
|
|
excludeVidRegex := generateRegexps(config.GetExcludes())
|
|
|
|
excludeImgRegex := generateRegexps(config.GetImageExcludes())
|
2020-10-12 23:12:46 +00:00
|
|
|
|
2021-02-23 01:56:01 +00:00
|
|
|
// don't scan zip images directly
|
|
|
|
if image.IsZipPath(s.Path) {
|
|
|
|
logger.Warnf("Cannot rescan zip image %s. Rescan zip gallery instead.", s.Path)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-02-11 00:06:04 +00:00
|
|
|
generatedPath := config.GetGeneratedPath()
|
|
|
|
|
2020-10-20 06:00:23 +00:00
|
|
|
return utils.SymWalk(s.Path, func(path string, info os.FileInfo, err error) error {
|
2020-10-13 23:51:36 +00:00
|
|
|
if err != nil {
|
|
|
|
logger.Warnf("error scanning %s: %s", path, err.Error())
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
if info.IsDir() {
|
2021-02-11 00:06:04 +00:00
|
|
|
// #1102 - ignore files in generated path
|
|
|
|
if utils.IsPathInDir(generatedPath, path) {
|
|
|
|
return filepath.SkipDir
|
|
|
|
}
|
|
|
|
|
2021-05-17 05:46:00 +00:00
|
|
|
// shortcut: skip the directory entirely if it matches both exclusion patterns
|
|
|
|
// add a trailing separator so that it correctly matches against patterns like path/.*
|
|
|
|
pathExcludeTest := path + string(filepath.Separator)
|
|
|
|
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, excludeVidRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, excludeImgRegex)) {
|
|
|
|
return filepath.SkipDir
|
|
|
|
}
|
|
|
|
|
2020-10-12 23:12:46 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-10-26 04:57:58 +00:00
|
|
|
if !s.ExcludeVideo && matchExtension(path, vidExt) && !matchFileRegex(path, excludeVidRegex) {
|
2020-10-12 23:12:46 +00:00
|
|
|
return f(path, info, err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if !s.ExcludeImage {
|
2020-10-26 04:57:58 +00:00
|
|
|
if (matchExtension(path, imgExt) || matchExtension(path, gExt)) && !matchFileRegex(path, excludeImgRegex) {
|
2020-10-12 23:12:46 +00:00
|
|
|
return f(path, info, err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|