mirror of https://github.com/perkeep/perkeep.git
UI: file download support
commit 2da1ef4093 (parent 2c66ab38ad)
@@ -33,7 +33,10 @@ import (
var _ = log.Printf

// TODO: rename StreamingFetcher to be Fetch (the common case) and
// make a new interface for SeekingFetcher (the rare case)
// make a new interface for FetchSeeker (the rare case)

// TODO: add FetcherAt / FetchAt (for HTTP range requests). But then how
// to make all FetchSeeker also be a FetchAt? By hand?

type Fetcher interface {
    // Fetch returns a blob. If the blob is not found then

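The TODOs above sketch splitting blob fetching into a common streaming case and a rare seeking case. Purely as an illustration of that idea (none of this is in the commit; the ReadSeekCloser name, the int64 size, and the os.Error return are assumptions inferred from how Fetch is called elsewhere in this diff), the proposed FetchSeeker could look roughly like:

// Hypothetical sketch only -- not part of this change.
type FetchSeeker interface {
    // Fetch returns a seekable blob, its size, and an error.
    Fetch(b *BlobRef) (file ReadSeekCloser, size int64, err os.Error)
}
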
@@ -25,6 +25,11 @@ import (
    "strings"
)

func ErrorRouting(conn http.ResponseWriter, req *http.Request) {
    http.Error(conn, "Handlers wired up wrong; this path shouldn't be hit", 500)
    log.Printf("Internal routing error on %q", req.URL.Path)
}

func BadRequestError(conn http.ResponseWriter, errorMessage string) {
    conn.WriteHeader(http.StatusBadRequest)
    log.Printf("Bad request: %s", errorMessage)

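For context, serveDownload later in this commit calls httputil.ErrorRouting when a request reaches it with a path suffix that should have been routed elsewhere. A hedged sketch of how a handler might use both helpers (handleExample is made up, not part of the commit):

// Illustrative only.
func handleExample(conn http.ResponseWriter, req *http.Request) {
    if req.Method != "GET" {
        BadRequestError(conn, "expected a GET request") // writes 400 and logs
        return
    }
    if !strings.HasPrefix(req.URL.Path, "/expected/") {
        ErrorRouting(conn, req) // writes 500 and logs a mis-wired route
        return
    }
    // ... normal handling ...
}
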
@@ -18,6 +18,8 @@ package schema

import (
    "fmt"
    "json"
    "log"
    "os"

    "camli/blobref"

@@ -30,12 +32,33 @@ type FileReader struct {
    ccon uint64 // bytes into current chunk already consumed
}

// TODO: make this take a blobref.FetcherAt instead?
func NewFileReader(fetcher blobref.Fetcher, fileBlobRef *blobref.BlobRef) (*FileReader, os.Error) {
    ss := new(Superset)
    rsc, _, err := fetcher.Fetch(fileBlobRef)
    if err != nil {
        return nil, fmt.Errorf("schema/filereader: fetching file schema blob: %v", err)
    }
    if err = json.NewDecoder(rsc).Decode(ss); err != nil {
        return nil, fmt.Errorf("schema/filereader: decoding file schema blob: %v", err)
    }
    if ss.Type != "file" {
        return nil, fmt.Errorf("schema/filereader: expected \"file\" schema blob, got %q", ss.Type)
    }
    return ss.NewFileReader(fetcher), nil
}

func (ss *Superset) NewFileReader(fetcher blobref.Fetcher) *FileReader {
    // TODO: return an error if ss isn't a Type "file" ?
    // TODO: return some error if the redundant ss.Size field doesn't match ContentParts?
    return &FileReader{fetcher, ss, 0, 0}
}

// FileSchema returns the reader's schema superset. Don't mutate it.
func (fr *FileReader) FileSchema() *Superset {
    return fr.ss
}

func (fr *FileReader) Skip(skipBytes uint64) {
    for skipBytes != 0 && fr.ci < len(fr.ss.ContentParts) {
        cp := fr.ss.ContentParts[fr.ci]

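A minimal usage sketch for the new constructor, mirroring what the serveDownload handler added later in this commit does (fetcher, fileBlobRef, and dst are placeholders, not names from the diff):

// Hedged sketch, not part of the commit.
fr, err := schema.NewFileReader(fetcher, fileBlobRef)
if err != nil {
    return err // schema blob missing, undecodable, or not of type "file"
}
log.Printf("file size: %d bytes", fr.FileSchema().Size)
_, err = io.Copy(dst, fr) // FileReader is an io.Reader over the file's ContentParts
return err
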
@@ -75,6 +98,7 @@ func (fr *FileReader) Read(p []byte) (n int, err os.Error) {
    // read sizes. we should stuff the rsc away in fr
    // and re-use it just re-seeking if needed, which
    // could also be tracked.
    log.Printf("filereader: fetching blob %s", br)
    rsc, _, ferr := fr.fetcher.Fetch(br)
    if ferr != nil {
        return 0, fmt.Errorf("schema: FileReader.Read error fetching blob %s: %v", br, ferr)

@@ -107,4 +131,3 @@ func minu64(a, b uint64) uint64 {
    }
    return b
}

@@ -26,9 +26,11 @@ import (
    "os"
    "path/filepath"
    "regexp"
    "strings"

    "camli/blobref"
    "camli/blobserver"
    "camli/httputil"
    "camli/jsonconfig"
    "camli/schema"
)

@@ -38,6 +40,12 @@ var _ = log.Printf
var staticFilePattern = regexp.MustCompile(`^([a-zA-Z0-9\-\_]+\.(html|js|css|png|jpg|gif))$`)
var identPattern = regexp.MustCompile(`^[a-zA-Z\_]+$`)

// Download URL suffix:
// $1: blobref (checked in download handler)
// $2: optional "/filename" to be sent as recommended download name,
//     if sane looking
var downloadPattern = regexp.MustCompile(`^download/([^/]+)(/.*)?$`)

// UIHandler handles serving the UI and discovery JSON.
type UIHandler struct {
    // URL prefixes (path or full URL) to the primary blob and

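Illustrative only (not part of the commit): how downloadPattern splits a download suffix. The blobref in the example is made up.

package main

import (
    "fmt"
    "regexp"
)

var downloadPattern = regexp.MustCompile(`^download/([^/]+)(/.*)?$`)

func main() {
    m := downloadPattern.FindStringSubmatch("download/sha1-0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33/report.pdf")
    fmt.Printf("blobref:  %q\n", m[1]) // "sha1-0beec..."; validated later by blobref.Parse
    fmt.Printf("filename: %q\n", m[2]) // "/report.pdf"; the handler strips the leading slash
    // With no filename, e.g. "download/sha1-...", m[2] is the empty string.
}
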
@@ -147,6 +155,8 @@ func (ui *UIHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
        ui.serveDiscovery(rw, req)
    case wantsUploadHelper(req):
        ui.serveUploadHelper(rw, req)
    case strings.HasPrefix(suffix, "download/"):
        ui.serveDownload(rw, req)
    default:
        file := ""
        if m := staticFilePattern.FindStringSubmatch(suffix); m != nil {

@@ -219,3 +229,49 @@ func (ui *UIHandler) serveUploadHelper(rw http.ResponseWriter, req *http.Request

    }
}

func (ui *UIHandler) serveDownload(rw http.ResponseWriter, req *http.Request) {
    if ui.Storage == nil {
        http.Error(rw, "No BlobRoot configured", 500)
        return
    }

    fetchSeeker, ok := ui.Storage.(blobref.Fetcher)
    if !ok {
        // TODO: wrap ui.Storage in disk-caching wrapper so it can seek
        http.Error(rw, "TODO: configured BlobRoot doesn't support seeking and disk cache wrapping not yet implemented", 500)
        return
    }

    suffix := req.Header.Get("X-PrefixHandler-PathSuffix")

    m := downloadPattern.FindStringSubmatch(suffix)
    if m == nil {
        httputil.ErrorRouting(rw, req)
        return
    }

    blobref := blobref.Parse(m[1])
    if blobref == nil {
        http.Error(rw, "Invalid blobref", 400)
        return
    }

    filename := m[2]
    if len(filename) > 0 {
        filename = filename[1:] // remove leading slash
    }

    fr, err := schema.NewFileReader(fetchSeeker, blobref)
    if err != nil {
        http.Error(rw, "Can't serve file: "+err.String(), 500)
        return
    }

    // TODO: fr.FileSchema() and guess a mime type? For now:
    schema := fr.FileSchema()
    rw.Header().Set("Content-Type", "application/octet-stream")
    rw.Header().Set("Content-Length", fmt.Sprintf("%d", schema.Size))
    io.Copy(rw, fr)

}

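Putting the pieces together: the blob-info page below links to ./download/<blobref>/<fileName>, the prefix handler hands the remainder of the path to the UI handler via the X-PrefixHandler-PathSuffix header, downloadPattern splits it, and schema.NewFileReader streams the file body with the schema blob's size as Content-Length. A hedged client-side sketch (the /ui/ mount prefix, host, and port are assumptions, not something this diff establishes):

// Hypothetical check of the new endpoint; URL prefix and host are assumed.
resp, err := http.Get("http://localhost:3179/ui/download/" + ref + "/" + fileName)
if err != nil {
    log.Fatal(err)
}
defer resp.Body.Close()
n, _ := io.Copy(os.Stdout, resp.Body) // body is served as application/octet-stream
log.Printf("downloaded %d bytes", n)
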
@@ -10,6 +10,7 @@
<pre id="blobpre"></pre>

<h1>Blob Contents</h1>
<div id="blobdownload"></div>
<pre id="blobdata" style="overflow: auto; max-width: 800px"></pre>

</body>

@@ -22,6 +22,8 @@ function getBlobParam() {

function blobInfoUpdate(bmap) {
    var blobpre = document.getElementById('blobpre');
    var bd = document.getElementById("blobdownload")
    bd.innerHTML = "";
    var blobref = getBlobParam();
    if (!blobref) {
        alert("no blobref?");

@@ -39,6 +41,17 @@ function blobInfoUpdate(bmap) {
        {
            success: function(data) {
                document.getElementById("blobdata").innerHTML = linkifyBlobRefs(data);
                if (binfo.camliType == "file") {
                    try {
                        finfo = JSON.parse(data);
                        bd.innerHTML = "<a href=''></a>";
                        var fileName = finfo.fileName || blobref;
                        bd.firstChild.href = "./download/" + blobref + "/" + fileName;
                        bd.firstChild.innerText = fileName;
                        bd.innerHTML = "Download: " + bd.innerHTML;
                    } catch (x) {
                    }
                }
            },
            fail: alert
        });