don't retry forever indexing files.

Brad Fitzpatrick 2011-06-08 18:56:56 -07:00
parent c61317703c
commit 97978b56d4
1 changed file with 8 additions and 1 deletion


@@ -221,7 +221,14 @@ func (mi *Indexer) populateFile(client *mysql.Client, blobRef *blobref.BlobRef,
 	fr := ss.NewFileReader(seekFetcher)
 	n, err := io.Copy(sha1, fr)
 	if err != nil {
-		return err
+		// TODO: job scheduling system to retry this spaced
+		// out max n times. Right now our options are
+		// ignoring this error (forever) or returning the
+		// error and making the indexing try again (likely
+		// forever failing). Both options suck. For now just
+		// log and act like all's okay.
+		log.Printf("mysqlindex: error indexing file %s: %v", blobRef, err)
+		return nil
 	}
 	log.Printf("file %s blobref is %s, size %d", blobRef, blobref.FromHash("sha1", sha1), n)
 	err = execSQL(client,
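
The TODO above describes the behavior this commit punts on: retrying the indexing work a bounded number of times, spaced out, before giving up. Below is a minimal standalone sketch of that idea in Go; the names (indexOnce, indexWithRetry, maxAttempts) are hypothetical and not part of this codebase.

package main

import (
	"errors"
	"log"
	"time"
)

// indexOnce stands in for the failure-prone part of populateFile
// (fetching and hashing the file). Hypothetical placeholder.
func indexOnce(attempt int) error {
	if attempt < 3 {
		return errors.New("transient fetch error")
	}
	return nil
}

// indexWithRetry tries up to maxAttempts times, sleeping longer between
// attempts, then logs the final failure instead of returning it so the
// caller's indexing loop doesn't retry forever.
func indexWithRetry(maxAttempts int) {
	delay := 100 * time.Millisecond
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		err := indexOnce(attempt)
		if err == nil {
			log.Printf("indexed successfully on attempt %d", attempt)
			return
		}
		log.Printf("attempt %d failed: %v", attempt, err)
		if attempt < maxAttempts {
			time.Sleep(delay)
			delay *= 2 // space each retry out further than the last
		}
	}
	log.Printf("giving up after %d attempts; logging and acting like all's okay", maxAttempts)
}

func main() {
	indexWithRetry(5)
}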