mirror of https://github.com/stashapp/stash.git
[Files Refactor] Rollback platform-agnostic paths (#2852)
* Rollback platform-agnostic path storage
* Add release note for database change
This commit is contained in:
parent 0c7b5cf6a1
commit 7b439556c0
@@ -214,7 +214,7 @@ func createSceneFile(ctx context.Context, name string, folderStore file.FolderSt
}

if err := fileStore.Create(ctx, f); err != nil {
return nil, err
return nil, fmt.Errorf("creating scene file %q: %w", name, err)
}

return f, nil
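The changed return statement above wraps the underlying error with %w, which keeps it available to errors.Is/errors.As while adding the file name for context. A minimal standalone sketch of that behaviour (the sentinel error and file name are invented for illustration):

    package main

    import (
        "errors"
        "fmt"
    )

    var errDisk = errors.New("disk full") // hypothetical underlying error

    func main() {
        wrapped := fmt.Errorf("creating scene file %q: %w", "scene.mp4", errDisk)
        fmt.Println(wrapped)                     // creating scene file "scene.mp4": disk full
        fmt.Println(errors.Is(wrapped, errDisk)) // true - %w preserves the error chain
    }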
@@ -1,6 +1,7 @@
package autotag

import (
"path/filepath"
"testing"

"github.com/stashapp/stash/pkg/file"

@@ -28,10 +29,14 @@ func TestPerformerScenes(t *testing.T) {
"performer + name",
`(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
}

// trailing backslash tests only work where filepath separator is not backslash
if filepath.Separator != '\\' {
performerNames = append(performerNames, test{
`performer + name\`,
`(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
},
})
}

for _, p := range performerNames {
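The regular expressions in these test cases anchor the performer name between non-word characters (or the start/end of the string), so the name is found inside a larger file path. A quick, self-contained check of the first pattern above against a sample path (the paths are invented for illustration):

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        re := regexp.MustCompile(`(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`)
        fmt.Println(re.MatchString("videos/Performer + Name.mp4")) // true - case-insensitive; "/" and "." act as boundaries
        fmt.Println(re.MatchString("videos/performername.mp4"))    // false - the "+" between the words is required
    }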
@@ -2,6 +2,7 @@ package autotag

import (
"fmt"
"path/filepath"
"strings"
"testing"

@@ -34,13 +35,10 @@ func generateNamePatterns(name, separator, ext string) []string {
ret = append(ret, fmt.Sprintf("%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("aaa%s%s.%s", separator, name, ext))
ret = append(ret, fmt.Sprintf("aaa%s%s%sbbb.%s", separator, name, separator, ext))
ret = append(ret, fmt.Sprintf("dir/%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir%sdir/%s%saaa.%s", separator, name, separator, ext))
ret = append(ret, fmt.Sprintf("dir\\%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("%s%saaa/dir/bbb.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("%s%saaa\\dir\\bbb.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir/%s%s/aaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir\\%s%s\\aaa.%s", name, separator, ext))
ret = append(ret, filepath.Join("dir", fmt.Sprintf("%s%saaa.%s", name, separator, ext)))
ret = append(ret, filepath.Join(fmt.Sprintf("dir%sdir", separator), fmt.Sprintf("%s%saaa.%s", name, separator, ext)))
ret = append(ret, filepath.Join(fmt.Sprintf("%s%saaa", name, separator), "dir", fmt.Sprintf("bbb.%s", ext)))
ret = append(ret, filepath.Join("dir", fmt.Sprintf("%s%s", name, separator), fmt.Sprintf("aaa.%s", ext)))

return ret
}
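The rewritten patterns above use filepath.Join instead of hard-coding both "/" and "\\" variants, so each test path is built with the separator of the platform the tests run on. A small illustration of the difference (output described for a Unix-like system):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        p := filepath.Join("dir", "name.aaa.mp4")
        fmt.Println(p)                          // "dir/name.aaa.mp4" on Linux/macOS, "dir\name.aaa.mp4" on Windows
        fmt.Println(string(filepath.Separator)) // "/" or "\" depending on the platform
    }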
@@ -91,8 +89,7 @@ func generateTestPaths(testName, ext string) (scenePatterns []string, falseScene
falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("%saaa.%s", testName, ext))

// add path separator false scenarios
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "/", ext)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "\\", ext)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, string(filepath.Separator), ext)...)

// split patterns only valid for ._- and whitespace
for _, separator := range testSeparators {
@@ -1,6 +1,7 @@
package autotag

import (
"path/filepath"
"testing"

"github.com/stashapp/stash/pkg/file"

@@ -18,49 +19,60 @@ type testStudioCase struct {
aliasRegex string
}

var testStudioCases = []testStudioCase{
{
"studio name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"studio + name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
`studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"studio name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
"studio + name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias + name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
`studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
`alias + name\`,
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
},
}
var (
testStudioCases = []testStudioCase{
{
"studio name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"studio + name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"studio name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
"studio + name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias + name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
}

trailingBackslashStudioCases = []testStudioCase{
{
`studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
`studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
`alias + name\`,
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
},
}
)

func TestStudioScenes(t *testing.T) {
t.Parallel()

for _, p := range testStudioCases {
tc := testStudioCases
// trailing backslash tests only work where filepath separator is not backslash
if filepath.Separator != '\\' {
tc = append(tc, trailingBackslashStudioCases...)
}

for _, p := range tc {
testStudioScenes(t, p)
}
}
@@ -1,6 +1,7 @@
package autotag

import (
"path/filepath"
"testing"

"github.com/stashapp/stash/pkg/file"

@@ -18,49 +19,60 @@ type testTagCase struct {
aliasRegex string
}

var testTagCases = []testTagCase{
{
"tag name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"tag + name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
`tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"tag name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
"tag + name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias + name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
`tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
`alias + name\`,
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
},
}
var (
testTagCases = []testTagCase{
{
"tag name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"tag + name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
"tag name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
{
"tag + name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
"alias + name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
},
}

trailingBackslashCases = []testTagCase{
{
`tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{
`tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
`alias + name\`,
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
},
}
)

func TestTagScenes(t *testing.T) {
t.Parallel()

for _, p := range testTagCases {
tc := testTagCases
// trailing backslash tests only work where filepath separator is not backslash
if filepath.Separator != '\\' {
tc = append(tc, trailingBackslashCases...)
}

for _, p := range tc {
testTagScenes(t, p)
}
}
@@ -584,13 +584,11 @@ func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error
basename = strings.ReplaceAll(basename, "*", "%")
dirName = strings.ReplaceAll(dirName, "*", "%")

dir, _ := path(dirName).Value()

table := qb.table()
folderTable := folderTableMgr.table

q := qb.selectDataset().Prepared(true).Where(
folderTable.Col("path").Like(dir),
folderTable.Col("path").Like(dirName),
table.Col("basename").Like(basename),
)
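FindByPath splits the query path with filepath and maps the user-facing "*" wildcard onto SQL's "%" before building the LIKE conditions; after this change the directory string is used directly rather than going through the path type. A rough sketch of that preprocessing step, using an invented input (output described for a Unix-like system):

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    func main() {
        p := "/media/*/scene*.mp4" // hypothetical search input
        basename := strings.ReplaceAll(filepath.Base(p), "*", "%")
        dirName := strings.ReplaceAll(filepath.Dir(p), "*", "%")
        fmt.Println(dirName, basename) // "/media/%" "scene%.mp4" - ready for path LIKE ? AND basename LIKE ?
    }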
@@ -607,10 +605,9 @@ func (qb *FileStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectD

var conds []exp.Expression
for _, pp := range p {
dir, _ := path(pp).Value()
dirWildcard, _ := path(pp + string(filepath.Separator) + "%").Value()
ppWildcard := pp + string(filepath.Separator) + "%"

conds = append(conds, folderTable.Col("path").Eq(dir), folderTable.Col("path").Like(dirWildcard))
conds = append(conds, folderTable.Col("path").Eq(pp), folderTable.Col("path").Like(ppWildcard))
}

return q.Where(
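With the path type gone, allInPaths matches a folder either exactly or by a separator-plus-"%" prefix, which is what the Eq/Like pair above expresses. A sketch of the two patterns generated for one entry (the folder path is invented for illustration):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        pp := "/media/scenes" // hypothetical library path
        ppWildcard := pp + string(filepath.Separator) + "%"
        // Yields the pair: path = '/media/scenes' OR path LIKE '/media/scenes/%'
        fmt.Println(pp, ppWildcard)
    }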
@@ -446,13 +446,13 @@ func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, bas
f.setError(err)
return
}
f.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?) OR (%s IS NOT NULL AND %[2]s regexp ?)", pathColumn, basenameColumn), c.Value, c.Value)
f.addWhere(fmt.Sprintf("%s IS NOT NULL AND %s IS NOT NULL AND %[1]s || '%[3]s' || %[2]s regexp ?", pathColumn, basenameColumn, string(filepath.Separator)), c.Value)
case models.CriterionModifierNotMatchesRegex:
if _, err := regexp.Compile(c.Value); err != nil {
f.setError(err)
return
}
f.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?) AND (%s IS NULL OR %[2]s NOT regexp ?)", pathColumn, basenameColumn), c.Value, c.Value)
f.addWhere(fmt.Sprintf("%s IS NULL OR %s IS NULL OR %[1]s || '%[3]s' || %[2]s NOT regexp ?", pathColumn, basenameColumn, string(filepath.Separator)), c.Value)
case models.CriterionModifierIsNull:
f.addWhere(fmt.Sprintf("(%s IS NULL OR TRIM(%[1]s) = '' OR %s IS NULL OR TRIM(%[2]s) = '')", pathColumn, basenameColumn))
case models.CriterionModifierNotNull:
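For the regex modifiers the handler now concatenates the folder path and basename with the native separator before applying the regexp operator, instead of testing the two columns separately. A sketch of what that format string expands to, with made-up column names standing in for pathColumn and basenameColumn:

    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        pathColumn, basenameColumn := "folders.path", "files.basename" // hypothetical column names
        clause := fmt.Sprintf("%s IS NOT NULL AND %s IS NOT NULL AND %[1]s || '%[3]s' || %[2]s regexp ?",
            pathColumn, basenameColumn, string(filepath.Separator))
        fmt.Println(clause)
        // On Linux/macOS: folders.path IS NOT NULL AND files.basename IS NOT NULL AND
        //                 folders.path || '/' || files.basename regexp ?
    }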
@@ -470,7 +470,7 @@ func getPathSearchClause(pathColumn, basenameColumn, p string, addWildcards, not
// directory plus basename
hasSlashes := strings.Contains(p, string(filepath.Separator))
trailingSlash := hasSlashes && p[len(p)-1] == filepath.Separator
const emptyDir = "/"
const emptyDir = string(filepath.Separator)

// possible values:
// dir/basename

@@ -480,8 +480,7 @@ func getPathSearchClause(pathColumn, basenameColumn, p string, addWildcards, not
// dirOrBasename

basename := filepath.Base(p)
dir := path(filepath.Dir(p)).String()
p = path(p).String()
dir := filepath.Dir(p)

if addWildcards {
p = "%" + p + "%"
@@ -3,7 +3,6 @@ package sqlite
import (
"context"
"database/sql"
"database/sql/driver"
"errors"
"fmt"
"path/filepath"

@@ -18,41 +17,19 @@ import (

const folderTable = "folders"

// path stores file paths in a platform-agnostic format and converts to platform-specific format for actual use.
type path string

func (p *path) Scan(value interface{}) error {
v, ok := value.(string)
if !ok {
return fmt.Errorf("invalid path type %T", value)
}

*p = path(filepath.FromSlash(v))
return nil
}

func (p path) String() string {
return filepath.ToSlash(string(p))
}

func (p path) Value() (driver.Value, error) {
return p.String(), nil
}

type folderRow struct {
ID file.FolderID `db:"id" goqu:"skipinsert"`
// Path is stored in the OS-agnostic slash format
Path path `db:"path"`
ZipFileID null.Int `db:"zip_file_id"`
ParentFolderID null.Int `db:"parent_folder_id"`
ModTime time.Time `db:"mod_time"`
CreatedAt time.Time `db:"created_at"`
UpdatedAt time.Time `db:"updated_at"`
ID file.FolderID `db:"id" goqu:"skipinsert"`
Path string `db:"path"`
ZipFileID null.Int `db:"zip_file_id"`
ParentFolderID null.Int `db:"parent_folder_id"`
ModTime time.Time `db:"mod_time"`
CreatedAt time.Time `db:"created_at"`
UpdatedAt time.Time `db:"updated_at"`
}

func (r *folderRow) fromFolder(o file.Folder) {
r.ID = o.ID
r.Path = path(o.Path)
r.Path = o.Path
r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID)
r.ParentFolderID = nullIntFromFolderIDPtr(o.ParentFolderID)
r.ModTime = o.ModTime
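The removed path type converted between the database representation and the platform one: filepath.ToSlash on the way in (Value) and filepath.FromSlash on the way out (Scan). After this change folderRow.Path is a plain string stored as the OS reports it. A minimal sketch of the conversion the old type performed (the Windows-style path is hypothetical and only changes when run on Windows):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        native := `C:\media\scenes`        // hypothetical path as the OS reports it (Windows)
        stored := filepath.ToSlash(native) // what Value() used to write: "C:/media/scenes" on Windows
        back := filepath.FromSlash(stored) // what Scan() used to return: the native form again
        fmt.Println(stored, back)
    }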
@@ -246,9 +223,7 @@ func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder
}

func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, error) {
dir, _ := path(p).Value()

q := qb.selectDataset().Prepared(true).Where(qb.table().Col("path").Eq(dir))
q := qb.selectDataset().Prepared(true).Where(qb.table().Col("path").Eq(p))

ret, err := qb.get(ctx, q)
if err != nil && !errors.Is(err, sql.ErrNoRows) {

@@ -274,10 +249,9 @@ func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.Selec

var conds []exp.Expression
for _, pp := range p {
dir, _ := path(pp).Value()
dirWildcard, _ := path(pp + string(filepath.Separator) + "%").Value()
ppWildcard := pp + string(filepath.Separator) + "%"

conds = append(conds, table.Col("path").Eq(dir), table.Col("path").Like(dirWildcard))
conds = append(conds, table.Col("path").Eq(pp), table.Col("path").Like(ppWildcard))
}

return q.Where(
@@ -438,8 +438,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal
foldersTable := folderTableMgr.table

basename := filepath.Base(p)
dir, _ := path(filepath.Dir(p)).Value()
pp, _ := path(p).Value()
dir := filepath.Dir(p)

sq := dialect.From(table).LeftJoin(
galleriesFilesJoinTable,

@@ -459,7 +458,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal
fileFoldersTable.Col("path").Eq(dir),
filesTable.Col("basename").Eq(basename),
),
foldersTable.Col("path").Eq(pp),
foldersTable.Col("path").Eq(p),
),
)
@@ -4,7 +4,6 @@ import (
"context"
"database/sql"
"fmt"
"path"
"path/filepath"
"strings"
"time"

@@ -52,61 +51,7 @@ type schema32Migrator struct {
folderCache map[string]folderInfo
}

func (m *schema32Migrator) migrateFolderSlashes(ctx context.Context) error {
logger.Infof("Migrating folder slashes")
const query = "SELECT `folders`.`id`, `folders`.`path` FROM `folders`"

rows, err := m.db.Query(query)
if err != nil {
return err
}
defer rows.Close()

for rows.Next() {
var id int
var p string

err := rows.Scan(&id, &p)
if err != nil {
return err
}

convertedPath := filepath.ToSlash(p)

_, err = m.db.Exec("UPDATE `folders` SET `path` = ? WHERE `id` = ?", convertedPath, id)
if err != nil {
return err
}
}

if err := rows.Err(); err != nil {
return err
}

return nil
}

// dir returns all but the last element of path, typically the path's directory.
// After dropping the final element using Split, the path is Cleaned and trailing
// slashes are removed.
//
// This is a re-implementation of path.Dir which changes double slash prefixed
// paths into single slash.
func dir(p string) string {
parent := path.Dir(p)
// restore the double slash
if strings.HasPrefix(p, "//") && len(parent) > 1 {
parent = "/" + parent
}

return parent
}

func (m *schema32Migrator) migrateFolders(ctx context.Context) error {
if err := m.migrateFolderSlashes(ctx); err != nil {
return err
}

logger.Infof("Migrating folders")

const query = "SELECT `folders`.`id`, `folders`.`path` FROM `folders` INNER JOIN `galleries` ON `galleries`.`folder_id` = `folders`.`id`"
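The deleted dir helper wrapped path.Dir so that a leading double slash (as used by some network paths) survived the Clean step; with native separators the migration now calls filepath.Dir directly. A small reproduction of the removed helper's behaviour, with an invented UNC-style path:

    package main

    import (
        "fmt"
        "path"
        "strings"
    )

    // dir mirrors the helper removed above: path.Dir, then restore a "//" prefix.
    func dir(p string) string {
        parent := path.Dir(p)
        if strings.HasPrefix(p, "//") && len(parent) > 1 {
            parent = "/" + parent
        }
        return parent
    }

    func main() {
        fmt.Println(path.Dir("//server/share/file.mp4")) // "/server/share" - Clean collapses the "//"
        fmt.Println(dir("//server/share/file.mp4"))      // "//server/share" - prefix restored
    }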
@@ -126,7 +71,7 @@ func (m *schema32Migrator) migrateFolders(ctx context.Context) error {
return err
}

parent := dir(p)
parent := filepath.Dir(p)
parentID, zipFileID, err := m.createFolderHierarchy(parent)
if err != nil {
return err

@@ -198,9 +143,8 @@ func (m *schema32Migrator) migrateFiles(ctx context.Context) error {
p = strings.ReplaceAll(p, legacyZipSeparator, string(filepath.Separator))
}

convertedPath := filepath.ToSlash(p)
parent := dir(convertedPath)
basename := path.Base(convertedPath)
parent := filepath.Dir(p)
basename := filepath.Base(p)
if parent != "." {
parentID, zipFileID, err := m.createFolderHierarchy(parent)
if err != nil {
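With paths kept in native form again, the migration splits them using filepath.Dir and filepath.Base rather than converting to slashes and using the path package. The distinction only matters for backslash separators; a quick illustration (the backslash path is hypothetical, and the filepath result shown is what Windows would produce):

    package main

    import (
        "fmt"
        "path"
        "path/filepath"
    )

    func main() {
        p := `C:\media\scene.mp4` // hypothetical path as stored by a Windows install after the rollback
        fmt.Println(path.Base(p))     // "C:\media\scene.mp4" - path only splits on "/"
        fmt.Println(filepath.Base(p)) // "scene.mp4" on Windows; same as path.Base on Linux/macOS
    }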
@@ -270,9 +214,9 @@ func (m *schema32Migrator) deletePlaceholderFolder(ctx context.Context) error {
}

func (m *schema32Migrator) createFolderHierarchy(p string) (*int, sql.NullInt64, error) {
parent := dir(p)
parent := filepath.Dir(p)

if parent == "." || parent == "/" {
if parent == "." || parent == string(filepath.Separator) {
// get or create this folder
return m.getOrCreateFolder(p, nil, sql.NullInt64{})
}
@@ -491,13 +491,11 @@ func (qb *SceneStore) FindByPath(ctx context.Context, p string) ([]*models.Scene
filesTable := fileTableMgr.table
foldersTable := folderTableMgr.table
basename := filepath.Base(p)
dirStr := filepath.Dir(p)
dir := filepath.Dir(p)

// replace wildcards
basename = strings.ReplaceAll(basename, "*", "%")
dirStr = strings.ReplaceAll(dirStr, "*", "%")

dir, _ := path(dirStr).Value()
dir = strings.ReplaceAll(dir, "*", "%")

sq := dialect.From(scenesFilesJoinTable).InnerJoin(
filesTable,
@@ -0,0 +1 @@
### **Warning:** Windows users will need to re-migrate from schema version 31 if they are upgrading from an older `files-refactor` build, or manually change the path separators in the `folders` table from `/` to `\` in the database.
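One possible way to apply the manual fix mentioned in the note above, sketched with Go's database/sql and the mattn/go-sqlite3 driver; the database location is an assumption, and the statement rewrites every row in folders, so back up the database first:

    package main

    import (
        "database/sql"
        "log"

        _ "github.com/mattn/go-sqlite3"
    )

    func main() {
        // Hypothetical default location of the stash database on Windows; adjust to your setup.
        db, err := sql.Open("sqlite3", `C:\Users\me\.stash\stash-go.sqlite`)
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        // Replace the platform-agnostic "/" separators with Windows "\" separators.
        if _, err := db.Exec(`UPDATE folders SET path = REPLACE(path, '/', '\')`); err != nil {
            log.Fatal(err)
        }
    }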
@@ -1,4 +1,5 @@
import v0170 from "./v0170.md";
import r20220826 from "./20220826.md";

export type Module = typeof v0170;

@@ -13,4 +14,8 @@ export const releaseNotes: IReleaseNotes[] = [
date: 20220801,
content: v0170,
},
{
date: 20220826,
content: r20220826,
},
];