mirror of https://github.com/perkeep/perkeep.git
importer/flickr: make test data for devcam
A few renamings in flickr.go too.

http://camlistore.org/issue/417

Change-Id: Ied28cfadc7a546f34b87f9a43462f82ee4cb71d6
parent 7e86255cac
commit 409ec362e3
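The test data hands devcam an http.RoundTripper whose canned responses stand in for the Flickr API, so the importer can be exercised end to end without network access or a real account. The sketch below is not part of the commit: it is a minimal, standard-library-only illustration of that idea, and the fakeTransport type, the example URL, and the JSON body are made up for the illustration. MakeTestData builds the real table with httputil.StaticResponder, httputil.FileResponder, and httputil.NewFakeTransport, as shown in the diff that follows.

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

// fakeTransport maps an exact request URL to a canned response body,
// the same shape as the table MakeTestData registers.
type fakeTransport struct {
	responses map[string]string
}

// RoundTrip serves the canned body for a known URL and fails for anything
// else, so an unexpected request from the importer is immediately visible.
func (t fakeTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	body, ok := t.responses[req.URL.String()]
	if !ok {
		return nil, fmt.Errorf("no fake response registered for %s", req.URL)
	}
	return &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"application/json; charset=UTF-8"}},
		Body:       io.NopCloser(strings.NewReader(body)),
		Request:    req,
	}, nil
}

func main() {
	// Hypothetical URL and body; the real test data registers one entry per
	// API call the importer is expected to make.
	const listURL = "https://api.flickr.com/services/rest/?format=json&method=flickr.photosets.getList&nojsoncallback=1&user_id=fakeUserId"
	client := &http.Client{Transport: fakeTransport{responses: map[string]string{
		listURL: `{"photosets": {"page": 1, "pages": 1, "photoset": []}}`,
	}}}

	resp, err := client.Get(listURL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	b, _ := io.ReadAll(resp.Body)
	fmt.Println(string(b)) // prints the canned JSON instead of hitting Flickr
}

Keying on the full URL string is also why the deterministic IDs and query encoding in the new test file matter: the URLs built at import time must match the registered keys byte for byte.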
@@ -40,6 +40,10 @@ const (
 	resourceOwnerAuthorizationURL = "https://www.flickr.com/services/oauth/authorize"
 	tokenRequestURL               = "https://www.flickr.com/services/oauth/access_token"
 
+	photosetsAPIPath = "flickr.photosets.getList"
+	photosetAPIPath  = "flickr.photosets.getPhotos"
+	photosAPIPath    = "flickr.people.getPhotos"
+
 	attrFlickrId = "flickrId"
 )
 
@@ -137,38 +141,36 @@ func (imp) Run(ctx *importer.RunContext) error {
 	return nil
 }
 
-type photosetsGetList struct {
-	Photosets struct {
-		Page     int
-		Pages    int
-		Perpage  int
-		Photoset []*photosetsGetListItem
-	}
+type photosetList struct {
+	Page     int
+	Pages    int
+	PerPage  int
+	Photoset []*photosetInfo
 }
 
-type photosetsGetListItem struct {
-	ID             string `json:"id"`
-	PrimaryPhotoID string `json:"primary"`
+type photosetInfo struct {
+	Id             string `json:"id"`
+	PrimaryPhotoId string `json:"primary"`
 	Title          contentString
 	Description    contentString
 }
 
-type photosetsGetPhotos struct {
-	Photoset struct {
-		ID    string `json:"id"`
-		Page  int    `json:",string"`
-		Pages int
-		Photo []struct {
-			ID             string
-			Originalformat string
-		}
-	}
+type photosetItems struct {
+	Id    string `json:"id"`
+	Page  int    `json:",string"`
+	Pages int
+	Photo []struct {
+		Id             string
+		OriginalFormat string
+	}
 }
 
 func (r *run) importPhotosets() error {
-	resp := photosetsGetList{}
+	resp := struct {
+		Photosets photosetList
+	}{}
 	if err := r.oauthContext().flickrAPIRequest(&resp,
-		"flickr.photosets.getList", "user_id", r.userID); err != nil {
+		photosetsAPIPath, "user_id", r.userID); err != nil {
 		return err
 	}
 
@@ -186,7 +188,7 @@ func (r *run) importPhotosets() error {
 		for page := 1; page >= 1; {
 			page, err = r.importPhotoset(setsNode, item, page)
 			if err != nil {
-				log.Printf("Flickr importer: error importing photoset %s: %s", item.ID, err)
+				log.Printf("Flickr importer: error importing photoset %s: %s", item.Id, err)
 				continue
 			}
 		}
@@ -194,27 +196,29 @@ func (r *run) importPhotosets() error {
 	return nil
 }
 
-func (r *run) importPhotoset(parent *importer.Object, photoset *photosetsGetListItem, page int) (int, error) {
-	photosetNode, err := parent.ChildPathObject(photoset.ID)
+func (r *run) importPhotoset(parent *importer.Object, photoset *photosetInfo, page int) (int, error) {
+	photosetNode, err := parent.ChildPathObject(photoset.Id)
 	if err != nil {
 		return 0, err
 	}
 
 	if err := photosetNode.SetAttrs(
-		attrFlickrId, photoset.ID,
+		attrFlickrId, photoset.Id,
 		nodeattr.Title, photoset.Title.Content,
 		nodeattr.Description, photoset.Description.Content,
-		importer.AttrPrimaryImageOfPage, photoset.PrimaryPhotoID); err != nil {
+		importer.AttrPrimaryImageOfPage, photoset.PrimaryPhotoId); err != nil {
 		return 0, err
 	}
 
-	resp := photosetsGetPhotos{}
-	if err := r.oauthContext().flickrAPIRequest(&resp, "flickr.photosets.getPhotos", "user_id", r.userID,
-		"page", fmt.Sprintf("%d", page), "photoset_id", photoset.ID, "extras", "original_format"); err != nil {
+	resp := struct {
+		Photoset photosetItems
+	}{}
+	if err := r.oauthContext().flickrAPIRequest(&resp, photosetAPIPath, "user_id", r.userID,
+		"page", fmt.Sprintf("%d", page), "photoset_id", photoset.Id, "extras", "original_format"); err != nil {
 		return 0, err
 	}
 
-	log.Printf("Importing page %d from photoset %s", page, photoset.ID)
+	log.Printf("Importing page %d from photoset %s", page, photoset.Id)
 
 	photosNode, err := r.getPhotosNode()
 	if err != nil {
@@ -222,16 +226,16 @@ func (r *run) importPhotoset(parent *importer.Object, photoset *photosetsGetListItem, page int) (int, error) {
 	}
 
 	for _, item := range resp.Photoset.Photo {
-		filename := fmt.Sprintf("%s.%s", item.ID, item.Originalformat)
+		filename := fmt.Sprintf("%s.%s", item.Id, item.OriginalFormat)
 		photoNode, err := photosNode.ChildPathObject(filename)
 		if err != nil {
 			log.Printf("Flickr importer: error finding photo node %s for addition to photoset %s: %s",
-				item.ID, photoset.ID, err)
+				item.Id, photoset.Id, err)
 			continue
 		}
 		if err := photosetNode.SetAttr("camliPath:"+filename, photoNode.PermanodeRef().String()); err != nil {
 			log.Printf("Flickr importer: error adding photo %s to photoset %s: %s",
-				item.ID, photoset.ID, err)
+				item.Id, photoset.Id, err)
 		}
 	}
 
@@ -247,7 +251,7 @@ type photosSearch struct {
 		Page    int
 		Pages   int
 		Perpage int
-		Total   int `json:",string"`
+		Total   int
 		Photo   []*photosSearchItem
 	}
 
@@ -255,7 +259,7 @@ type photosSearch struct {
 }
 
 type photosSearchItem struct {
-	ID             string `json:"id"`
+	Id             string `json:"id"`
 	Title          string
 	IsPublic       int
 	IsFriend       int
@@ -264,7 +268,7 @@ type photosSearchItem struct {
 	DateUpload     string // Unix timestamp, in GMT.
 	DateTaken      string // formatted as "2006-01-02 15:04:05", so no timezone info.
 	OriginalFormat string
-	LastUpdate     string
+	LastUpdate     string // Unix timestamp.
 	Latitude       float32
 	Longitude      float32
 	Tags           string
@@ -291,8 +295,8 @@ func (r *run) importPhotos() error {
 
 func (r *run) importPhotosPage(page int) (int, error) {
 	resp := photosSearch{}
-	if err := r.oauthContext().flickrAPIRequest(&resp, "flickr.people.getPhotos", "user_id", r.userID, "page", fmt.Sprintf("%d", page),
-		"extras", "description, date_upload, date_taken, original_format, last_update, geo, tags, machine_tags, views, media, url_o"); err != nil {
+	if err := r.oauthContext().flickrAPIRequest(&resp, photosAPIPath, "user_id", r.userID, "page", fmt.Sprintf("%d", page),
+		"extras", "description,date_upload,date_taken,original_format,last_update,geo,tags,machine_tags,views,media,url_o"); err != nil {
 		return 0, err
 	}
 
@@ -304,7 +308,7 @@ func (r *run) importPhotosPage(page int) (int, error) {
 
 	for _, item := range resp.Photos.Photo {
 		if err := r.importPhoto(photosNode, item); err != nil {
-			log.Printf("Flickr importer: error importing %s: %s", item.ID, err)
+			log.Printf("Flickr importer: error importing %s: %s", item.Id, err)
 			continue
 		}
 	}
@@ -324,7 +328,7 @@ func (r *run) importPhotosPage(page int) (int, error) {
 // * Conflicts: For all metadata changes, prefer any non-imported claims
 // * Test!
 func (r *run) importPhoto(parent *importer.Object, photo *photosSearchItem) error {
-	filename := fmt.Sprintf("%s.%s", photo.ID, photo.OriginalFormat)
+	filename := fmt.Sprintf("%s.%s", photo.Id, photo.OriginalFormat)
 	photoNode, err := parent.ChildPathObject(filename)
 	if err != nil {
 		return err
@@ -334,16 +338,16 @@ func (r *run) importPhoto(parent *importer.Object, photo *photosSearchItem) error {
 	dateTaken, err := time.ParseInLocation("2006-01-02 15:04:05", photo.DateTaken, schema.UnknownLocation)
 	if err != nil {
 		// default to the published date otherwise
-		log.Printf("Flickr importer: problem with date taken of photo %v, defaulting to published date instead.", photo.ID)
+		log.Printf("Flickr importer: problem with date taken of photo %v, defaulting to published date instead.", photo.Id)
 		seconds, err := strconv.ParseInt(photo.DateUpload, 10, 64)
 		if err != nil {
-			return fmt.Errorf("could not parse date upload time %q for image %v: %v", photo.DateUpload, photo.ID, err)
+			return fmt.Errorf("could not parse date upload time %q for image %v: %v", photo.DateUpload, photo.Id, err)
 		}
 		dateTaken = time.Unix(seconds, 0)
 	}
 
 	attrs := []string{
-		attrFlickrId, photo.ID,
+		attrFlickrId, photo.Id,
 		nodeattr.DateCreated, schema.RFC3339FromTime(dateTaken),
 		nodeattr.Description, photo.Description.Content,
 	}
@@ -360,13 +364,13 @@ func (r *run) importPhoto(parent *importer.Object, photo *photosSearchItem) error {
 	// lastupdate is a Unix timestamp according to https://www.flickr.com/services/api/flickr.photos.getInfo.html
 	seconds, err := strconv.ParseInt(photo.LastUpdate, 10, 64)
 	if err != nil {
-		return fmt.Errorf("could not parse lastupdate time for image %v: %v", photo.ID, err)
+		return fmt.Errorf("could not parse lastupdate time for image %v: %v", photo.Id, err)
 	}
 	lastUpdate := time.Unix(seconds, 0)
 	if lastUpdateString := photoNode.Attr(importer.AttrLastReviewed); lastUpdateString != "" {
 		oldLastUpdate, err := time.Parse(time.RFC3339, lastUpdateString)
 		if err != nil {
-			return fmt.Errorf("could not parse last stored update time for image %v: %v", photo.ID, err)
+			return fmt.Errorf("could not parse last stored update time for image %v: %v", photo.Id, err)
 		}
 		if lastUpdate.Equal(oldLastUpdate) {
 			return nil
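The canned URLs registered in the new test file (below) must match, byte for byte, the URLs the importer builds for each flickrAPIRequest call: the REST endpoint plus alphabetically sorted query parameters. The following sketch is not code from the commit; buildFlickrURL and the base URL are hypothetical stand-ins for the importer's request construction and its apiURL constant, shown only to make that correspondence concrete.

package main

import (
	"fmt"
	"net/url"
)

// buildFlickrURL is a hypothetical helper: baseURL stands in for the
// importer's apiURL constant, method for e.g. "flickr.photosets.getList",
// and keyval for the "key", "value" pairs passed to flickrAPIRequest.
func buildFlickrURL(baseURL, method string, keyval ...string) string {
	v := url.Values{}
	v.Set("format", "json")
	v.Set("nojsoncallback", "1")
	v.Set("method", method)
	for i := 0; i < len(keyval)-1; i += 2 {
		v.Set(keyval[i], keyval[i+1])
	}
	// url.Values.Encode sorts keys, which is why each canned response can be
	// registered under a single canonical URL string.
	return baseURL + "?" + v.Encode()
}

func main() {
	u := buildFlickrURL("https://api.flickr.com/services/rest/",
		"flickr.photosets.getList", "user_id", "fakeUserId")
	fmt.Println(u)
	// Prints:
	// https://api.flickr.com/services/rest/?format=json&method=flickr.photosets.getList&nojsoncallback=1&user_id=fakeUserId
}

This is the same parameter ordering visible in the photosetsURL, photosURL, and photo URL strings that MakeTestData registers below.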
@@ -0,0 +1,288 @@
+/*
+Copyright 2014 The Camlistore Authors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package flickr
+
+import (
+	"encoding/json"
+	"fmt"
+	"log"
+	"net/http"
+	"net/url"
+	"os"
+	"path/filepath"
+	"time"
+
+	"camlistore.org/pkg/blob"
+	"camlistore.org/pkg/httputil"
+	"camlistore.org/pkg/importer"
+	"camlistore.org/pkg/osutil"
+)
+
+var _ importer.TestDataMaker = imp{}
+
+func (im imp) SetTestAccount(acctNode *importer.Object) error {
+	return acctNode.SetAttrs(
+		importer.AcctAttrAccessToken, "fakeAccessToken",
+		importer.AcctAttrAccessTokenSecret, "fakeAccessSecret",
+		importer.AcctAttrUserID, "fakeUserId",
+		importer.AcctAttrName, "fakeName",
+		importer.AcctAttrUserName, "fakeScreenName",
+	)
+}
+
+func (im imp) MakeTestData() http.RoundTripper {
+	const (
+		nPhotosets = 5 // Arbitrary number of sets.
+		perPage    = 3 // number of photos per page (both when getting sets and when getting photos).
+		fakeUserId = "fakeUserId"
+	)
+	// Photoset N has N photos, so we've got 15 ( = 5 + 4 + 3 + 2 + 1) photos in total.
+	var nPhotos int
+	for i := 1; i <= nPhotosets; i++ {
+		nPhotos += i
+	}
+	nPhotosPages := nPhotos / perPage
+	if nPhotos%perPage != 0 {
+		nPhotosPages++
+	}
+
+	okHeader := `HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+
+`
+
+	// TODO(mpl): this scheme does not take into account that we could have the same photo
+	// in different albums. These two photos will end up with a different photoId.
+	buildPhotoIds := func(nsets, perPage int) []string {
+		var ids []string
+		for i := 1; i <= nsets; i++ {
+			photosetId := blob.RefFromString(fmt.Sprintf("Photoset %d", i)).DigestPrefix(10)
+			page := 1
+			// Photoset N has N photos.
+			indexOnPage := 1
+			for j := 1; j <= i; j++ {
+				photoId := blob.RefFromString(fmt.Sprintf("Photo %d on page %d of photoset %s", indexOnPage, page, photosetId)).DigestPrefix(10)
+				ids = append(ids, photoId)
+				indexOnPage++
+				if indexOnPage > perPage {
+					page++
+					indexOnPage = 1
+				}
+			}
+		}
+		return ids
+	}
+	photoIds := buildPhotoIds(nPhotosets, perPage)
+
+	responses := make(map[string]func() *http.Response)
+	// Initial photo sets list
+	photosetsURL := fmt.Sprintf("%s?format=json&method=%s&nojsoncallback=1&user_id=%s", apiURL, photosetsAPIPath, fakeUserId)
+	response := fmt.Sprintf("%s%s", okHeader, fakePhotosetsList(nPhotosets))
+	responses[photosetsURL] = httputil.StaticResponder(response)
+
+	// All the photoset calls. One call for each page of each photoset.
+	// Each page has perPage photos, or maybe less if end of the photoset.
+	{
+		pageStart := 0
+		albumEnd, pageEnd, albumNum, pages, page := 1, 1, 1, 1, 1
+		photosetId := blob.RefFromString(fmt.Sprintf("Photoset %d", albumNum)).DigestPrefix(10)
+		photosURL := fmt.Sprintf("%s?extras=original_format&format=json&method=%s&nojsoncallback=1&page=%d&photoset_id=%s&user_id=%s",
+			apiURL, photosetAPIPath, page, photosetId, fakeUserId)
+		response := fmt.Sprintf("%s%s", okHeader, fakePhotoset(photosetId, page, pages, photoIds[pageStart:pageEnd]))
+		responses[photosURL] = httputil.StaticResponder(response)
+		for k, _ := range photoIds {
+			if k < pageEnd {
+				continue
+			}
+			page++
+			pageStart = k
+			pageEnd = k + perPage
+			if page > pages {
+				albumNum++
+				page = 1
+				pages = albumNum / perPage
+				if albumNum%perPage != 0 {
+					pages++
+				}
+				albumEnd = pageStart + albumNum
+				photosetId = blob.RefFromString(fmt.Sprintf("Photoset %d", albumNum)).DigestPrefix(10)
+			}
+			if pageEnd > albumEnd {
+				pageEnd = albumEnd
+			}
+			photosURL := fmt.Sprintf("%s?extras=original_format&format=json&method=%s&nojsoncallback=1&page=%d&photoset_id=%s&user_id=%s",
+				apiURL, photosetAPIPath, page, photosetId, fakeUserId)
+			response := fmt.Sprintf("%s%s", okHeader, fakePhotoset(photosetId, page, pages, photoIds[pageStart:pageEnd]))
+			responses[photosURL] = httputil.StaticResponder(response)
+		}
+	}
+
+	// All the photo page calls (to get the photos info).
+	// Each page has perPage photos, until end of photos.
+	for i := 1; i <= nPhotosPages; i++ {
+		photosURL := fmt.Sprintf("%s?extras=", apiURL) +
+			url.QueryEscape("description,date_upload,date_taken,original_format,last_update,geo,tags,machine_tags,views,media,url_o") +
+			fmt.Sprintf("&format=json&method=%s&nojsoncallback=1&page=%d&user_id=%s", photosAPIPath, i, fakeUserId)
+		response := fmt.Sprintf("%s%s", okHeader, fakePhotosPage(i, nPhotosPages, perPage, photoIds))
+		responses[photosURL] = httputil.StaticResponder(response)
+	}
+
+	// Actual photo(s) URL.
+	pudgyPic := fakePicture()
+	for _, v := range photoIds {
+		photoURL := fmt.Sprintf("https://farm3.staticflickr.com/2897/14198397111_%s_o.jpg?user_id=%s", v, fakeUserId)
+		responses[photoURL] = httputil.FileResponder(pudgyPic)
+	}
+
+	return httputil.NewFakeTransport(responses)
+}
+
+func fakePhotosetsList(sets int) string {
+	var photosets []*photosetInfo
+	for i := 1; i <= sets; i++ {
+		title := fmt.Sprintf("Photoset %d", i)
+		photosetId := blob.RefFromString(title).DigestPrefix(10)
+		primaryPhotoId := blob.RefFromString(fmt.Sprintf("Photo 1 on page 1 of photoset %s", photosetId)).DigestPrefix(10)
+		item := &photosetInfo{
+			Id:             photosetId,
+			PrimaryPhotoId: primaryPhotoId,
+			Title:          contentString{Content: title},
+			Description:    contentString{Content: "fakePhotosetDescription"},
+		}
+		photosets = append(photosets, item)
+	}
+
+	setslist := struct {
+		Photosets photosetList
+	}{
+		Photosets: photosetList{
+			Photoset: photosets,
+		},
+	}
+
+	list, err := json.MarshalIndent(&setslist, "", " ")
+	if err != nil {
+		log.Fatalf("%v", err)
+	}
+	return string(list)
+}
+
+func fakePhotoset(photosetId string, page, pages int, photoIds []string) string {
+	var photos []struct {
+		Id             string
+		OriginalFormat string
+	}
+	for _, v := range photoIds {
+		item := struct {
+			Id             string
+			OriginalFormat string
+		}{
+			Id:             v,
+			OriginalFormat: "jpg",
+		}
+		photos = append(photos, item)
+	}
+
+	photoslist := struct {
+		Photoset photosetItems
+	}{
+		Photoset: photosetItems{
+			Id:    photosetId,
+			Page:  page,
+			Pages: pages,
+			Photo: photos,
+		},
+	}
+
+	list, err := json.MarshalIndent(&photoslist, "", " ")
+	if err != nil {
+		log.Fatalf("%v", err)
+	}
+	return string(list)
+
+}
+
+func fakePhotosPage(page, pages, perPage int, photoIds []string) string {
+	var photos []*photosSearchItem
+	currentPage := 1
+	indexOnPage := 1
+	day := time.Hour * 24
+	year := day * 365
+	const dateCreatedFormat = "2006-01-02 15:04:05"
+
+	for k, v := range photoIds {
+		if indexOnPage > perPage {
+			currentPage++
+			indexOnPage = 1
+		}
+		if currentPage < page {
+			indexOnPage++
+			continue
+		}
+		created := time.Now().Add(-time.Duration(k) * year)
+		published := created.Add(day)
+		updated := published.Add(day)
+		item := &photosSearchItem{
+			Id:             v,
+			Title:          fmt.Sprintf("Photo %d", k+1),
+			Description:    contentString{Content: "fakePhotoDescription"},
+			DateUpload:     fmt.Sprintf("%d", published.Unix()),
+			DateTaken:      created.Format(dateCreatedFormat),
+			LastUpdate:     fmt.Sprintf("%d", updated.Unix()),
+			URL:            fmt.Sprintf("https://farm3.staticflickr.com/2897/14198397111_%s_o.jpg", v),
+			OriginalFormat: "jpg",
+		}
+		photos = append(photos, item)
+		if len(photos) >= perPage {
+			break
+		}
+		indexOnPage++
+	}
+
+	photosPage := &photosSearch{
+		Photos: struct {
+			Page    int
+			Pages   int
+			Perpage int
+			Total   int
+			Photo   []*photosSearchItem
+		}{
+			Page:    page,
+			Pages:   pages,
+			Perpage: perPage,
+			Photo:   photos,
+		},
+	}
+
+	list, err := json.MarshalIndent(photosPage, "", " ")
+	if err != nil {
+		log.Fatalf("%v", err)
+	}
+	return string(list)
+
+}
+
+func fakePicture() string {
+	camliDir, err := osutil.GoPackagePath("camlistore.org")
+	if err == os.ErrNotExist {
+		log.Fatal("Directory \"camlistore.org\" not found under GOPATH/src; are you not running with devcam?")
+	}
+	if err != nil {
+		log.Fatalf("Error searching for \"camlistore.org\" under GOPATH: %v", err)
+	}
+	return filepath.Join(camliDir, filepath.FromSlash("third_party/glitch/npc_piggy__x1_walk_png_1354829432.png"))
+}
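Finally, a short usage sketch (also not part of the commit) of the ID scheme the new file relies on: blob.RefFromString plus DigestPrefix derive every fake photoset and photo ID deterministically from a descriptive string, so the IDs embedded in the canned JSON, the keys of the response map, and the fake photo URLs all agree without sharing state between the helper functions.

package main

import (
	"fmt"

	"camlistore.org/pkg/blob"
)

func main() {
	// Same strings as the test data uses; same inputs always yield the same
	// ten-character digest prefixes.
	photosetId := blob.RefFromString("Photoset 1").DigestPrefix(10)
	photoId := blob.RefFromString(fmt.Sprintf("Photo 1 on page 1 of photoset %s", photosetId)).DigestPrefix(10)
	// fakePhotosetsList, fakePhotoset, and the photo URL map can therefore be
	// generated independently and still line up.
	fmt.Println(photosetId, photoId)
}

Running the snippet twice prints the same two prefixes, which is exactly the property the response table depends on.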