Merge goexif with upstream package

This pulls the changes from the current HEAD of
https://github.com/rwcarlsen/goexif
(rev cf045e9d6ba052fd348f82394d364cca4937589a)

Changes to goexif from upstream:
- Add support for reading Nikon and Canon maker notes
- Adds parser registration to exif package
- Renamed cmd to exifstat
- Renamed exported fields and methods in goexif/tiff
- adds support for bare tiff images. bug fix and minor cosmetics
- support pulling the thumbnail
- adds thumbnail support to exif pkg
- tiff defines DataType and constants for the datatype values
- Update covnertVals and TypeCategory to use new constants for DataType
- Renamed test data dir in exif tests
- created type+constants for raw tiff tag data types

Not merged from upstream:
- ~1 MB of test JPGs in goexif/exif/samples

Minor changes in camlistore.org/pkg/* were necessary to reflect the
name changes in the exported fields and methods.

Change-Id: I0fdcad2d7b5e01e0d4160a5eb52b8ec750d353cf
This commit is contained in:
Fabian Wickborn 2014-09-03 22:47:24 +02:00
parent 108e16510f
commit f0d9c04bc2
16 changed files with 1214 additions and 501 deletions

View File

@ -434,7 +434,7 @@ func (ix *Index) populateFile(fetcher blob.Fetcher, b *schema.Blob, mm *mutation
}
func tagFormatString(tag *tiff.Tag) string {
switch tag.Format() {
switch tag.TypeCategory() {
case tiff.IntVal:
return "int"
case tiff.RatVal:
@ -472,13 +472,13 @@ func indexEXIF(wholeRef blob.Ref, header []byte, mm *mutationMap) {
return nil
}
key := keyEXIFTag.Key(wholeRef, fmt.Sprintf("%04x", tag.Id))
numComp := int(tag.Ncomp)
if tag.Format() == tiff.StringVal {
numComp := int(tag.Count)
if tag.TypeCategory() == tiff.StringVal {
numComp = 1
}
var val bytes.Buffer
val.WriteString(keyEXIFTag.Val(tagFmt, numComp, ""))
if tag.Format() == tiff.StringVal {
if tag.TypeCategory() == tiff.StringVal {
str := tag.StringVal()
if containsUnsafeRawStrByte(str) {
val.WriteString(urle(str))
@ -486,7 +486,7 @@ func indexEXIF(wholeRef blob.Ref, header []byte, mm *mutationMap) {
val.WriteString(str)
}
} else {
for i := 0; i < int(tag.Ncomp); i++ {
for i := 0; i < int(tag.Count); i++ {
if i > 0 {
val.WriteByte('|')
}

View File

@ -977,7 +977,7 @@ func exifDateTimeInLocation(x *exif.Exif, loc *time.Location) (time.Time, error)
return time.Time{}, err
}
}
if tag.Format() != tiff.StringVal {
if tag.TypeCategory() != tiff.StringVal {
return time.Time{}, errors.New("DateTime[Original] not in string format")
}
const exifTimeLayout = "2006:01:02 15:04:05"

View File

@ -1,82 +1,78 @@
Changing code under third_party/github.com/camlistore/goexif
============================================================
These instructions assume you have a github.com account.
Syncing updates from upstream
-----------------------------
Sync github.com/rwcarlsen/goexif -> github.com/camlistore/goexif
----------------------------------------------------------------
1. Issue a pull request at https://github.com/camlistore/goexif/pulls, set
the base fork to camlistore/goexif go1 branch, and the head fork as
rwcarlsen/goexif go1 branch. Follow the normal github workflow until
someone on the camlistore project merges in the changes.
These instructions assume you have a github.com account. Further it is assumed
that your camlistore copy is at $GOPATH/src/camlistore.org
Sync github.com/camlistore/goexif -> camlistore.org/third_party/github.com/camlistore/goexif
--------------------------------------------------------------------------------------------
1. Once someone on the camlistore team merges in the latest from upstream,
checkout a local copy:
1. Checkout the github.com/rwcarlsen/goexif by calling
$ go get -u github.com/rwcarlsen/goexif
$ cd $GOPATH/src/github.com/rwcarlsen/goexif
$ git clone https://github.com/camlistore/goexif
$ cd goexif
2. Use git show to determine the revision.
$ git show cf045e9d6ba052fd348f82394d364cca4937589a
2. Make a patch to apply to the camlistore.org copy. You'll need to know the
git rev of github.com/camlistore/goexif that was last merged to
camlistore.org/third_party, for this example we'll use 030a4566:
3. Start merging using your favorite tools (vimdiff, meld, kdiff3, etc.)
# Create individual patches that have been applied upstream.
$ git format-patch -o /tmp/patches 030a4566
$ cd /path/to/camlistore.org/third_party/github.com/camlistore/goexif
# Create new branch to temporarily apply each upstream change.
$ git checkout -b patches_individual
# Apply patches.
$ git am --directory=third_party/github.com/camlistore/goexif /tmp/patches/*
4. For the moment make sure to rewrite any new goexif import paths. For
example,
# If something fails to apply try:
$ git apply <PATCH THAT FAILED> --reject
$ edit edit edit
$ git add <ANY FIXED FILES>
$ git am --resolved
import "github.com/rwcarlsen/goexif/tiff"
# If it is a patch camlistore already had, because we created it and
# pushed it upstream, you can skip it with:
$ git am --skip
needs to be rewritten to
# Now create a new branch to squash all the changes into one. Keeping a
# record of upstream commits in the default commit message of a single
# commit.
$ git checkout -b patches_squashed master
$ git merge --squash patches_individual
import "camlistore.org/third_party/github.com/camlistore/goexif/tiff"
# Verify no new files have been added that require import path updating:
$ cd /path/to/camlistore.org/third_party/
$ ./rewrite-imports.sh -l
# If any rewrites are required, run:
$ ./rewrite-imports.sh -w
NOTE: Currently, you cannot use rewrite-imports.sh. This is going to change
in the future, as "third_party/github.com/camlistore/goexif/" is going to
be renamed to "third_party/github.com/rwcarlsen/goexif/".
# Now create a commit that will be sent for review.
$ git commit -v
5. Camlistore does not need to have all test data of upstream goexif in its
repository. For that reason, do not merge the file
exif/regress_expected_test.go and the subfolder 'exif/samples' to
camlistore. When merging exif_test.go, make sure to keep the test path to
be "." instead of "samples".
Rationale: regress_expected_test.go is generated anew by exif_test.go (func
TestRegenRegress()), when the non-exported boolean variable 'regenRegress'
is set to true. In upstream, this file is generated on the subfolder
called 'samples', which contains some JPGs. Since Camlistore omits the samples
folder, we change the exif_test.go to use the goexif/exif folder directly
(which contains the single sample1.jpg) instead of the samples subfolder.
Unless new test images are added, regress_expected_test.go does not need to
be regenerated. It is unlikely that Camlistore will add new test data.
6. Ensure all Camlistore code still compiles and all tests run through:
$ devcam test
7. # Now create a commit that will be sent for review.
$ git commit -v
# Enter your commit message on the first line per usual.
# You should see summaries of all the changes merged in the commit
# message. Leave these, they will be useful next sync as we'll know what
# commits were sync'd. Send the change for review.
$ ./misc/review
# Add the goexif revision from step 1. to the commit message as well
# as the commit messages of any noteworthy commits from upstream.
$ devcam review
Sync camlistore.org/third_party/github.com/camlistore/goexif -> github.com/camlistore/goexif
----------------------------------------------------------------------------------------------
1. TODO(wathiede): this should follow a similar process as 'Sync
github.com/camlistore/goexif ->
camlistore.org/third_party/github.com/camlistore/goexif' Basically use
format-patch to generate a patch for each change we've made in
camlistore.org's repo and apply to a fork of github.com/camlistore/goexif.
Maybe skip the 'merge --squash' step, and keep each change in the log of
github.com/camlistore/goexif.
Sync github.com/camlistore/goexif -> github.com/rwcarlsen/goexif
--------------------------------------------------------------------
1. This should follow the standard github pull-request workflow. Issue a
pull request at https://github.com/request/goexif/pulls, set the base fork
to rwcarlsen/goexif go1 branch, and the head fork as camlistore/goexif go1
branch.
Syncing Camlistore contributions back to upstream goexif
------------------------------------------------------------
1. Use a merge tool like meld, kdiff3, or similar to manually merge the code.
$ meld $GOPATH/src/camlistore.org/third_party/github.com/camlistore/goexif \
$GOPATH/src/github.com/rwcarlsen/goexif
2. While merging, make sure to have the correct goexif import paths. For
example,
import "camlistore.org/third_party/github.com/camlistore/goexif/tiff"
needs to be
import "github.com/rwcarlsen/goexif/tiff"
3. Make sure the samples folder path in exif_test.go stays "samples".
4. Follow the github.com procedures to commit and submit a Pull Request.
2. Address any feedback during review and rwcarlsen will merge changes to
github.com/rwcarlsen/goexif as appropriate.

View File

@ -4,18 +4,20 @@ goexif
Provides decoding of basic exif and tiff encoded data. Still in alpha - no guarantees.
Suggestions and pull requests are welcome. Functionality is split into two packages - "exif" and "tiff"
The exif package depends on the tiff package.
Documentation can be found at http://go.pkgdoc.org/github.com/camlistore/goexif
Documentation can be found at http://godoc.org/github.com/rwcarlsen/goexif
Like goexif? - Bitcoin tips welcome: 17w65FVqx196Qp7tfCCSLqyvsHUhiEEa7P
To install, in a terminal type:
```
go get github.com/camlistore/goexif/exif
go get github.com/rwcarlsen/goexif/exif
```
Or if you just want the tiff package:
```
go get github.com/camlistore/goexif/tiff
go get github.com/rwcarlsen/goexif/tiff
```
Example usage:
@ -28,31 +30,31 @@ import (
"log"
"fmt"
"github.com/camlistore/goexif/exif"
"github.com/rwcarlsen/goexif/exif"
)
func main() {
fname := "sample1.jpg"
fname := "sample1.jpg"
f, err := os.Open(fname)
if err != nil {
log.Fatal(err)
}
f, err := os.Open(fname)
if err != nil {
log.Fatal(err)
}
x, err := exif.Decode(f)
f.Close()
if err != nil {
log.Fatal(err)
}
x, err := exif.Decode(f)
defer f.Close()
if err != nil {
log.Fatal(err)
}
camModel, _ := x.Get("Model")
date, _ := x.Get("DateTimeOriginal")
fmt.Println(camModel.StringVal())
fmt.Println(date.StringVal())
camModel, _ := x.Get(exif.Model)
date, _ := x.Get(exif.DateTimeOriginal)
fmt.Println(camModel.StringVal())
fmt.Println(date.StringVal())
focal, _ := x.Get("FocalLength")
numer, denom := focal.Rat2(0) // retrieve first (only) rat. value
fmt.Printf("%v/%v", numer, denom)
focal, _ := x.Get(exif.FocalLength)
numer, denom := focal.Rat2(0) // retrieve first (only) rat. value
fmt.Printf("%v/%v", numer, denom)
}
```

View File

@ -1,36 +0,0 @@
package main
import (
"flag"
"fmt"
"log"
"os"
"camlistore.org/third_party/github.com/camlistore/goexif/exif"
"camlistore.org/third_party/github.com/camlistore/goexif/tiff"
)
// main decodes EXIF data from the file named by the first command-line
// argument and prints every decoded EXIF field as "name: value".
func main() {
	flag.Parse()
	fname := flag.Arg(0)

	f, err := os.Open(fname)
	if err != nil {
		log.Fatal(err)
	}
	// Release the file handle when decoding and walking are done
	// (the original leaked it until process exit).
	defer f.Close()

	x, err := exif.Decode(f)
	if err != nil {
		log.Fatal(err)
	}
	// Walker.Walk below always returns nil today, but check the error so a
	// future walker implementation cannot fail silently.
	if err := x.Walk(Walker{}); err != nil {
		log.Fatal(err)
	}
}
// Walker is a stateless exif.Walker implementation that prints each
// visited field to stdout.
type Walker struct{}
// Walk is called once per decoded EXIF field. It prints the field as
// "name: value" (value JSON-encoded) and always returns nil so the
// traversal continues through every field.
func (_ Walker) Walk(name exif.FieldName, tag *tiff.Tag) error {
// Marshal errors are deliberately ignored: a failed marshal prints an
// empty value rather than aborting the walk.
data, _ := tag.MarshalJSON()
fmt.Printf("%v: %v\n", name, string(data))
return nil
}

View File

@ -21,12 +21,12 @@ func ExampleDecode() {
log.Fatal(err)
}
camModel, _ := x.Get("Model")
date, _ := x.Get("DateTimeOriginal")
camModel, _ := x.Get(exif.Model)
date, _ := x.Get(exif.DateTimeOriginal)
fmt.Println(camModel.StringVal())
fmt.Println(date.StringVal())
focal, _ := x.Get("FocalLength")
focal, _ := x.Get(exif.FocalLength)
numer, denom := focal.Rat2(0) // retrieve first (only) rat. value
fmt.Printf("%v/%v", numer, denom)
}

View File

@ -1,5 +1,5 @@
// Package exif implements decoding of EXIF data as defined in the EXIF 2.2
// specification.
// specification (http://www.exif.org/Exif2-2.PDF).
package exif
import (
@ -10,28 +10,16 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"strings"
"time"
"camlistore.org/third_party/github.com/camlistore/goexif/tiff"
)
var validField map[FieldName]bool
func init() {
validField = make(map[FieldName]bool)
for _, name := range exifFields {
validField[name] = true
}
for _, name := range gpsFields {
validField[name] = true
}
for _, name := range interopFields {
validField[name] = true
}
}
const (
jpeg_APP1 = 0xE1
exifPointer = 0x8769
gpsPointer = 0x8825
interopPointer = 0xA005
@ -45,32 +33,95 @@ func (tag TagNotPresentError) Error() string {
return fmt.Sprintf("exif: tag %q is not present", string(tag))
}
func isTagNotPresentErr(err error) bool {
_, ok := err.(TagNotPresentError)
return ok
// Parser allows the registration of custom parsing and field loading
// in the Decode function.
type Parser interface {
// Parse should read data from x and insert parsed fields into x via
// LoadTags.
Parse(x *Exif) error
}
var parsers []Parser
func init() {
RegisterParsers(&parser{})
}
// RegisterParsers registers one or more parsers to be automatically called
// when decoding EXIF data via the Decode function.
func RegisterParsers(ps ...Parser) {
parsers = append(parsers, ps...)
}
type parser struct{}
func (p *parser) Parse(x *Exif) error {
x.LoadTags(x.Tiff.Dirs[0], exifFields, false)
// thumbnails
if len(x.Tiff.Dirs) >= 2 {
x.LoadTags(x.Tiff.Dirs[1], thumbnailFields, false)
}
// recurse into exif, gps, and interop sub-IFDs
if err := loadSubDir(x, ExifIFDPointer, exifFields); err != nil {
return err
}
if err := loadSubDir(x, GPSInfoIFDPointer, gpsFields); err != nil {
return err
}
return loadSubDir(x, InteroperabilityIFDPointer, interopFields)
}
func loadSubDir(x *Exif, ptr FieldName, fieldMap map[uint16]FieldName) error {
r := bytes.NewReader(x.Raw)
tag, err := x.Get(ptr)
if err != nil {
return nil
}
offset := tag.Int(0)
_, err = r.Seek(offset, 0)
if err != nil {
return errors.New("exif: seek to sub-IFD failed: " + err.Error())
}
subDir, _, err := tiff.DecodeDir(r, x.Tiff.Order)
if err != nil {
return errors.New("exif: sub-IFD decode failed: " + err.Error())
}
x.LoadTags(subDir, fieldMap, false)
return nil
}
// Exif provides access to decoded EXIF metadata fields and values.
type Exif struct {
tif *tiff.Tiff
Tiff *tiff.Tiff
main map[FieldName]*tiff.Tag
Raw []byte
}
// Decode parses EXIF-encoded data from r and returns a queryable Exif object.
// Decode parses EXIF-encoded data from r and returns a queryable Exif
// object. After the exif data section is called and the tiff structure
// decoded, each registered parser is called (in order of registration). If
// one parser returns an error, decoding terminates and the remaining
// parsers are not called.
func Decode(r io.Reader) (*Exif, error) {
// EXIF data in JPEG is stored in the APP1 marker. EXIF data uses the TIFF
// format to store data.
// If we're parsing a TIFF image, we don't need to strip away any data.
// If we're parsing a JPEG image, we need to strip away the JPEG APP1
// marker and also the EXIF header.
header := make([]byte, 4)
n, err := r.Read(header)
if n < len(header) {
return nil, errors.New("exif: short read on header")
}
if err != nil {
return nil, err
}
if n < len(header) {
return nil, errors.New("exif: short read on header")
}
var isTiff bool
switch string(header) {
@ -100,9 +151,9 @@ func Decode(r io.Reader) (*Exif, error) {
tif, err = tiff.Decode(tr)
er = bytes.NewReader(b.Bytes())
} else {
// Strip away JPEG APP1 header.
// Locate the JPEG APP1 header.
var sec *appSec
sec, err = newAppSec(0xE1, r)
sec, err = newAppSec(jpeg_APP1, r)
if err != nil {
return nil, err
}
@ -115,55 +166,47 @@ func Decode(r io.Reader) (*Exif, error) {
}
if err != nil {
return nil, errors.New("exif: decode failed: " + err.Error())
return nil, fmt.Errorf("exif: decode failed (%v) ", err)
}
er.Seek(0, 0)
raw, err := ioutil.ReadAll(er)
if err != nil {
return nil, fmt.Errorf("exif: decode failed (%v) ", err)
}
// build an exif structure from the tiff
x := &Exif{
main: map[FieldName]*tiff.Tag{},
tif: tif,
Tiff: tif,
Raw: raw,
}
ifd0 := tif.Dirs[0]
for _, tag := range ifd0.Tags {
name := exifFields[tag.Id]
x.main[name] = tag
}
// recurse into exif, gps, and interop sub-IFDs
if err = x.loadSubDir(er, exifIFDPointer, exifFields); err != nil {
return x, err
}
if err = x.loadSubDir(er, gpsInfoIFDPointer, gpsFields); err != nil {
return x, err
}
if err = x.loadSubDir(er, interoperabilityIFDPointer, interopFields); err != nil {
return x, err
for i, p := range parsers {
if err := p.Parse(x); err != nil {
return x, fmt.Errorf("exif: parser %v failed (%v)", i, err)
}
}
return x, nil
}
func (x *Exif) loadSubDir(r *bytes.Reader, ptrName FieldName, fieldMap map[uint16]FieldName) error {
tag, ok := x.main[ptrName]
if !ok {
return nil
}
offset := tag.Int(0)
_, err := r.Seek(offset, 0)
if err != nil {
return errors.New("exif: seek to sub-IFD failed: " + err.Error())
}
subDir, _, err := tiff.DecodeDir(r, x.tif.Order)
if err != nil {
return errors.New("exif: sub-IFD decode failed: " + err.Error())
}
for _, tag := range subDir.Tags {
// LoadTags loads tags into the available fields from the tiff Directory
// using the given tagid-fieldname mapping. Used to load makernote and
// other meta-data. If showMissing is true, tags in d that are not in the
// fieldMap will be loaded with the FieldName UnknownPrefix followed by the
// tag ID (in hex format).
func (x *Exif) LoadTags(d *tiff.Dir, fieldMap map[uint16]FieldName, showMissing bool) {
for _, tag := range d.Tags {
name := fieldMap[tag.Id]
if name == "" {
if !showMissing {
continue
}
name = FieldName(fmt.Sprintf("%v%x", UnknownPrefix, tag.Id))
}
x.main[name] = tag
}
return nil
}
// Get retrieves the EXIF tag for the given field name.
@ -171,22 +214,21 @@ func (x *Exif) loadSubDir(r *bytes.Reader, ptrName FieldName, fieldMap map[uint1
// If the tag is not known or not present, an error is returned. If the
// tag name is known, the error will be a TagNotPresentError.
func (x *Exif) Get(name FieldName) (*tiff.Tag, error) {
if !validField[name] {
return nil, fmt.Errorf("exif: invalid tag name %q", name)
} else if tg, ok := x.main[name]; ok {
if tg, ok := x.main[name]; ok {
return tg, nil
}
return nil, TagNotPresentError(name)
}
// Walker is the interface used to traverse all exif fields of an Exif object.
// Returning a non-nil error aborts the walk/traversal.
// Walker is the interface used to traverse all fields of an Exif object.
type Walker interface {
// Walk is called for each non-nil EXIF field. Returning a non-nil
// error aborts the walk/traversal.
Walk(name FieldName, tag *tiff.Tag) error
}
// Walk calls the Walk method of w with the name and tag for every non-nil exif
// field.
// Walk calls the Walk method of w with the name and tag for every non-nil
// EXIF field. If w aborts the walk with an error, that error is returned.
func (x *Exif) Walk(w Walker) error {
for name, tag := range x.main {
if err := w.Walk(name, tag); err != nil {
@ -214,7 +256,7 @@ func (x *Exif) DateTime() (time.Time, error) {
return dt, err
}
}
if tag.Format() != tiff.StringVal {
if tag.TypeCategory() != tiff.StringVal {
return dt, errors.New("DateTime[Original] not in string format")
}
exifTimeLayout := "2006:01:02 15:04:05"
@ -271,6 +313,22 @@ func (x *Exif) String() string {
return buf.String()
}
// JpegThumbnail returns the jpeg thumbnail if it exists. If it doesn't exist,
// TagNotPresentError will be returned
func (x *Exif) JpegThumbnail() ([]byte, error) {
offset, err := x.Get(ThumbJPEGInterchangeFormat)
if err != nil {
return nil, err
}
length, err := x.Get(ThumbJPEGInterchangeFormatLength)
if err != nil {
return nil, err
}
return x.Raw[offset.Int(0) : offset.Int(0)+length.Int(0)], nil
}
// MarshalJson implements the encoding/json.Marshaler interface providing output of
// all EXIF fields present (names and values).
func (x Exif) MarshalJSON() ([]byte, error) {
return json.Marshal(x.main)
}
@ -285,7 +343,7 @@ type appSec struct {
func newAppSec(marker byte, r io.Reader) (*appSec, error) {
br := bufio.NewReader(r)
app := &appSec{marker: marker}
var dataLen uint16
var dataLen int
// seek to marker
for dataLen == 0 {
@ -303,16 +361,16 @@ func newAppSec(marker byte, r io.Reader) (*appSec, error) {
if err != nil {
return nil, err
}
dataLen = binary.BigEndian.Uint16(dataLenBytes)
dataLen = int(binary.BigEndian.Uint16(dataLenBytes))
}
// read section data
nread := 0
for nread < int(dataLen) {
s := make([]byte, int(dataLen)-nread)
for nread < dataLen {
s := make([]byte, dataLen-nread)
n, err := br.Read(s)
nread += n
if err != nil && nread < int(dataLen) {
if err != nil && nread < dataLen {
return nil, err
}
app.data = append(app.data, s[:n]...)

View File

@ -1,62 +1,142 @@
package exif
import (
"flag"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"testing"
"camlistore.org/third_party/github.com/camlistore/goexif/tiff"
)
func TestDecode(t *testing.T) {
name := "sample1.jpg"
f, err := os.Open(name)
if err != nil {
t.Fatalf("%v\n", err)
// switch to true to regenerate regression expected values
var regenRegress = false
var dataDir = flag.String("test_data_dir", ".", "Directory where the data files for testing are located")
// TestRegenRegress regenerates the expected image exif fields/values for
// sample images.
func TestRegenRegress(t *testing.T) {
if !regenRegress {
return
}
x, err := Decode(f)
dst, err := os.Create("regress_expected_test.go")
if err != nil {
t.Fatal(err)
}
if x == nil {
t.Fatalf("No error and yet %v was not decoded\n", name)
defer dst.Close()
dir, err := os.Open(".")
if err != nil {
t.Fatal(err)
}
defer dir.Close()
val, err := x.Get("Model")
t.Logf("Model: %v", val)
t.Log(x)
names, err := dir.Readdirnames(0)
if err != nil {
t.Fatal(err)
}
for i, name := range names {
names[i] = filepath.Join(".", name)
}
makeExpected(names, dst)
}
type walker struct {
t *testing.T
func makeExpected(files []string, w io.Writer) {
fmt.Fprintf(w, "package exif\n\n")
fmt.Fprintf(w, "var regressExpected = map[string]map[FieldName]string{\n")
for _, name := range files {
f, err := os.Open(name)
if err != nil {
continue
}
x, err := Decode(f)
if err != nil {
f.Close()
continue
}
fmt.Fprintf(w, "\t\"%v\": map[FieldName]string{\n", filepath.Base(name))
x.Walk(&regresswalk{w})
fmt.Fprintf(w, "\t},\n")
f.Close()
}
fmt.Fprintf(w, "}\n")
}
func (w *walker) Walk(name FieldName, tag *tiff.Tag) error {
w.t.Logf("%v: %v", name, tag)
type regresswalk struct {
wr io.Writer
}
func (w *regresswalk) Walk(name FieldName, tag *tiff.Tag) error {
if strings.HasPrefix(string(name), UnknownPrefix) {
fmt.Fprintf(w.wr, "\t\t\"%v\": `%v`,\n", name, tag.String())
} else {
fmt.Fprintf(w.wr, "\t\t%v: `%v`,\n", name, tag.String())
}
return nil
}
func TestWalk(t *testing.T) {
name := "sample1.jpg"
f, err := os.Open(name)
func TestDecode(t *testing.T) {
fpath := filepath.Join(*dataDir, "")
f, err := os.Open(fpath)
if err != nil {
t.Fatalf("%v\n", err)
t.Fatalf("Could not open sample directory '%s': %v", fpath, err)
}
x, err := Decode(f)
names, err := f.Readdirnames(0)
if err != nil {
t.Error(err)
}
if x == nil {
t.Fatal("bad err")
t.Fatalf("Could not read sample directory '%s': %v", fpath, err)
}
x.Walk(&walker{t})
cnt := 0
for _, name := range names {
if !strings.HasSuffix(name, ".jpg") {
t.Logf("skipping non .jpg file %v", name)
continue
}
t.Logf("testing file %v", name)
f, err := os.Open(filepath.Join(fpath, name))
if err != nil {
t.Fatal(err)
}
x, err := Decode(f)
if err != nil {
t.Fatal(err)
} else if x == nil {
t.Fatalf("No error and yet %v was not decoded", name)
}
x.Walk(&walker{name, t})
cnt++
}
if cnt != len(regressExpected) {
t.Errorf("Did not process enough samples, got %d, want %d", cnt, len(regressExpected))
}
}
type walker struct {
picName string
t *testing.T
}
func (w *walker) Walk(field FieldName, tag *tiff.Tag) error {
// this needs to be commented out when regenerating regress expected vals
if v := regressExpected[w.picName][field]; v != tag.String() {
w.t.Errorf("pic %v: expected '%v' got '%v'", w.picName, v, tag.String())
}
return nil
}
func TestMarshal(t *testing.T) {
name := "sample1.jpg"
name := filepath.Join(*dataDir, "sample1.jpg")
f, err := os.Open(name)
if err != nil {
t.Fatalf("%v\n", err)

View File

@ -2,18 +2,137 @@ package exif
type FieldName string
// UnknownPrefix is used as the first part of field names for decoded tags for
// which there is no known/supported EXIF field.
const UnknownPrefix = "UnknownTag_"
// Primary EXIF fields
const (
ImageWidth FieldName = "ImageWidth"
ImageLength FieldName = "ImageLength" // height
Orientation FieldName = "Orientation"
DateTime FieldName = "DateTime"
DateTimeOriginal FieldName = "DateTimeOriginal"
ImageWidth FieldName = "ImageWidth"
ImageLength = "ImageLength" // Image height called Length by EXIF spec
BitsPerSample = "BitsPerSample"
Compression = "Compression"
PhotometricInterpretation = "PhotometricInterpretation"
Orientation = "Orientation"
SamplesPerPixel = "SamplesPerPixel"
PlanarConfiguration = "PlanarConfiguration"
YCbCrSubSampling = "YCbCrSubSampling"
YCbCrPositioning = "YCbCrPositioning"
XResolution = "XResolution"
YResolution = "YResolution"
ResolutionUnit = "ResolutionUnit"
DateTime = "DateTime"
ImageDescription = "ImageDescription"
Make = "Make"
Model = "Model"
Software = "Software"
Artist = "Artist"
Copyright = "Copyright"
ExifIFDPointer = "ExifIFDPointer"
GPSInfoIFDPointer = "GPSInfoIFDPointer"
InteroperabilityIFDPointer = "InteroperabilityIFDPointer"
ExifVersion = "ExifVersion"
FlashpixVersion = "FlashpixVersion"
ColorSpace = "ColorSpace"
ComponentsConfiguration = "ComponentsConfiguration"
CompressedBitsPerPixel = "CompressedBitsPerPixel"
PixelXDimension = "PixelXDimension"
PixelYDimension = "PixelYDimension"
MakerNote = "MakerNote"
UserComment = "UserComment"
RelatedSoundFile = "RelatedSoundFile"
DateTimeOriginal = "DateTimeOriginal"
DateTimeDigitized = "DateTimeDigitized"
SubSecTime = "SubSecTime"
SubSecTimeOriginal = "SubSecTimeOriginal"
SubSecTimeDigitized = "SubSecTimeDigitized"
ImageUniqueID = "ImageUniqueID"
ExposureTime = "ExposureTime"
FNumber = "FNumber"
ExposureProgram = "ExposureProgram"
SpectralSensitivity = "SpectralSensitivity"
ISOSpeedRatings = "ISOSpeedRatings"
OECF = "OECF"
ShutterSpeedValue = "ShutterSpeedValue"
ApertureValue = "ApertureValue"
BrightnessValue = "BrightnessValue"
ExposureBiasValue = "ExposureBiasValue"
MaxApertureValue = "MaxApertureValue"
SubjectDistance = "SubjectDistance"
MeteringMode = "MeteringMode"
LightSource = "LightSource"
Flash = "Flash"
FocalLength = "FocalLength"
SubjectArea = "SubjectArea"
FlashEnergy = "FlashEnergy"
SpatialFrequencyResponse = "SpatialFrequencyResponse"
FocalPlaneXResolution = "FocalPlaneXResolution"
FocalPlaneYResolution = "FocalPlaneYResolution"
FocalPlaneResolutionUnit = "FocalPlaneResolutionUnit"
SubjectLocation = "SubjectLocation"
ExposureIndex = "ExposureIndex"
SensingMethod = "SensingMethod"
FileSource = "FileSource"
SceneType = "SceneType"
CFAPattern = "CFAPattern"
CustomRendered = "CustomRendered"
ExposureMode = "ExposureMode"
WhiteBalance = "WhiteBalance"
DigitalZoomRatio = "DigitalZoomRatio"
FocalLengthIn35mmFilm = "FocalLengthIn35mmFilm"
SceneCaptureType = "SceneCaptureType"
GainControl = "GainControl"
Contrast = "Contrast"
Saturation = "Saturation"
Sharpness = "Sharpness"
DeviceSettingDescription = "DeviceSettingDescription"
SubjectDistanceRange = "SubjectDistanceRange"
)
// thumbnail fields
const (
exifIFDPointer FieldName = "ExifIFDPointer"
gpsInfoIFDPointer = "GPSInfoIFDPointer"
interoperabilityIFDPointer = "InteroperabilityIFDPointer"
ThumbJPEGInterchangeFormat = "ThumbJPEGInterchangeFormat" // offset to thumb jpeg SOI
ThumbJPEGInterchangeFormatLength = "ThumbJPEGInterchangeFormatLength" // byte length of thumb
)
// GPS fields
const (
GPSVersionID FieldName = "GPSVersionID"
GPSLatitudeRef = "GPSLatitudeRef"
GPSLatitude = "GPSLatitude"
GPSLongitudeRef = "GPSLongitudeRef"
GPSLongitude = "GPSLongitude"
GPSAltitudeRef = "GPSAltitudeRef"
GPSAltitude = "GPSAltitude"
GPSTimeStamp = "GPSTimeStamp"
GPSSatelites = "GPSSatelites"
GPSStatus = "GPSStatus"
GPSMeasureMode = "GPSMeasureMode"
GPSDOP = "GPSDOP"
GPSSpeedRef = "GPSSpeedRef"
GPSSpeed = "GPSSpeed"
GPSTrackRef = "GPSTrackRef"
GPSTrack = "GPSTrack"
GPSImgDirectionRef = "GPSImgDirectionRef"
GPSImgDirection = "GPSImgDirection"
GPSMapDatum = "GPSMapDatum"
GPSDestLatitudeRef = "GPSDestLatitudeRef"
GPSDestLatitude = "GPSDestLatitude"
GPSDestLongitudeRef = "GPSDestLongitudeRef"
GPSDestLongitude = "GPSDestLongitude"
GPSDestBearingRef = "GPSDestBearingRef"
GPSDestBearing = "GPSDestBearing"
GPSDestDistanceRef = "GPSDestDistanceRef"
GPSDestDistance = "GPSDestDistance"
GPSProcessingMethod = "GPSProcessingMethod"
GPSAreaInformation = "GPSAreaInformation"
GPSDateStamp = "GPSDateStamp"
GPSDifferential = "GPSDifferential"
)
// interoperability fields
const (
InteroperabilityIndex FieldName = "InteroperabilityIndex"
)
var exifFields = map[uint16]FieldName{
@ -21,145 +140,150 @@ var exifFields = map[uint16]FieldName{
////////// IFD 0 ////////////////////
/////////////////////////////////////
// image data structure
0x0100: "ImageWidth",
0x0101: "ImageLength",
0x0102: "BitsPerSample",
0x0103: "Compression",
0x0106: "PhotometricInterpretation",
0x0112: "Orientation",
0x0115: "SamplesPerPixel",
0x011C: "PlanarConfiguration",
0x0212: "YCbCrSubSampling",
0x0213: "YCbCrPositioning",
0x011A: "XResolution",
0x011B: "YResolution",
0x0128: "ResolutionUnit",
// image data structure for the thumbnail
0x0100: ImageWidth,
0x0101: ImageLength,
0x0102: BitsPerSample,
0x0103: Compression,
0x0106: PhotometricInterpretation,
0x0112: Orientation,
0x0115: SamplesPerPixel,
0x011C: PlanarConfiguration,
0x0212: YCbCrSubSampling,
0x0213: YCbCrPositioning,
0x011A: XResolution,
0x011B: YResolution,
0x0128: ResolutionUnit,
// Other tags
0x0132: "DateTime",
0x010E: "ImageDescription",
0x010F: "Make",
0x0110: "Model",
0x0131: "Software",
0x013B: "Artist",
0x8298: "Copyright",
0x0132: DateTime,
0x010E: ImageDescription,
0x010F: Make,
0x0110: Model,
0x0131: Software,
0x013B: Artist,
0x8298: Copyright,
// private tags
exifPointer: "ExifIFDPointer",
exifPointer: ExifIFDPointer,
/////////////////////////////////////
////////// Exif sub IFD /////////////
/////////////////////////////////////
gpsPointer: "GPSInfoIFDPointer",
interopPointer: "InteroperabilityIFDPointer",
gpsPointer: GPSInfoIFDPointer,
interopPointer: InteroperabilityIFDPointer,
0x9000: "ExifVersion",
0xA000: "FlashpixVersion",
0x9000: ExifVersion,
0xA000: FlashpixVersion,
0xA001: "ColorSpace",
0xA001: ColorSpace,
0x9101: "ComponentsConfiguration",
0x9102: "CompressedBitsPerPixel",
0xA002: "PixelXDimension",
0xA003: "PixelYDimension",
0x9101: ComponentsConfiguration,
0x9102: CompressedBitsPerPixel,
0xA002: PixelXDimension,
0xA003: PixelYDimension,
0x927C: "MakerNote",
0x9286: "UserComment",
0x927C: MakerNote,
0x9286: UserComment,
0xA004: "RelatedSoundFile",
0x9003: "DateTimeOriginal",
0x9004: "DateTimeDigitized",
0x9290: "SubSecTime",
0x9291: "SubSecTimeOriginal",
0x9292: "SubSecTimeDigitized",
0xA004: RelatedSoundFile,
0x9003: DateTimeOriginal,
0x9004: DateTimeDigitized,
0x9290: SubSecTime,
0x9291: SubSecTimeOriginal,
0x9292: SubSecTimeDigitized,
0xA420: "ImageUniqueID",
0xA420: ImageUniqueID,
// picture conditions
0x829A: "ExposureTime",
0x829D: "FNumber",
0x8822: "ExposureProgram",
0x8824: "SpectralSensitivity",
0x8827: "ISOSpeedRatings",
0x8828: "OECF",
0x9201: "ShutterSpeedValue",
0x9202: "ApertureValue",
0x9203: "BrightnessValue",
0x9204: "ExposureBiasValue",
0x9205: "MaxApertureValue",
0x9206: "SubjectDistance",
0x9207: "MeteringMode",
0x9208: "LightSource",
0x9209: "Flash",
0x920A: "FocalLength",
0x9214: "SubjectArea",
0xA20B: "FlashEnergy",
0xA20C: "SpatialFrequencyResponse",
0xA20E: "FocalPlaneXResolution",
0xA20F: "FocalPlaneYResolution",
0xA210: "FocalPlaneResolutionUnit",
0xA214: "SubjectLocation",
0xA215: "ExposureIndex",
0xA217: "SensingMethod",
0xA300: "FileSource",
0xA301: "SceneType",
0xA302: "CFAPattern",
0xA401: "CustomRendered",
0xA402: "ExposureMode",
0xA403: "WhiteBalance",
0xA404: "DigitalZoomRatio",
0xA405: "FocalLengthIn35mmFilm",
0xA406: "SceneCaptureType",
0xA407: "GainControl",
0xA408: "Contrast",
0xA409: "Saturation",
0xA40A: "Sharpness",
0xA40B: "DeviceSettingDescription",
0xA40C: "SubjectDistanceRange",
0x829A: ExposureTime,
0x829D: FNumber,
0x8822: ExposureProgram,
0x8824: SpectralSensitivity,
0x8827: ISOSpeedRatings,
0x8828: OECF,
0x9201: ShutterSpeedValue,
0x9202: ApertureValue,
0x9203: BrightnessValue,
0x9204: ExposureBiasValue,
0x9205: MaxApertureValue,
0x9206: SubjectDistance,
0x9207: MeteringMode,
0x9208: LightSource,
0x9209: Flash,
0x920A: FocalLength,
0x9214: SubjectArea,
0xA20B: FlashEnergy,
0xA20C: SpatialFrequencyResponse,
0xA20E: FocalPlaneXResolution,
0xA20F: FocalPlaneYResolution,
0xA210: FocalPlaneResolutionUnit,
0xA214: SubjectLocation,
0xA215: ExposureIndex,
0xA217: SensingMethod,
0xA300: FileSource,
0xA301: SceneType,
0xA302: CFAPattern,
0xA401: CustomRendered,
0xA402: ExposureMode,
0xA403: WhiteBalance,
0xA404: DigitalZoomRatio,
0xA405: FocalLengthIn35mmFilm,
0xA406: SceneCaptureType,
0xA407: GainControl,
0xA408: Contrast,
0xA409: Saturation,
0xA40A: Sharpness,
0xA40B: DeviceSettingDescription,
0xA40C: SubjectDistanceRange,
}
var gpsFields = map[uint16]FieldName{
/////////////////////////////////////
//// GPS sub-IFD ////////////////////
/////////////////////////////////////
0x0: "GPSVersionID",
0x1: "GPSLatitudeRef",
0x2: "GPSLatitude",
0x3: "GPSLongitudeRef",
0x4: "GPSLongitude",
0x5: "GPSAltitudeRef",
0x6: "GPSAltitude",
0x7: "GPSTimeStamp",
0x8: "GPSSatelites",
0x9: "GPSStatus",
0xA: "GPSMeasureMode",
0xB: "GPSDOP",
0xC: "GPSSpeedRef",
0xD: "GPSSpeed",
0xE: "GPSTrackRef",
0xF: "GPSTrack",
0x10: "GPSImgDirectionRef",
0x11: "GPSImgDirection",
0x12: "GPSMapDatum",
0x13: "GPSDestLatitudeRef",
0x14: "GPSDestLatitude",
0x15: "GPSDestLongitudeRef",
0x16: "GPSDestLongitude",
0x17: "GPSDestBearingRef",
0x18: "GPSDestBearing",
0x19: "GPSDestDistanceRef",
0x1A: "GPSDestDistance",
0x1B: "GPSProcessingMethod",
0x1C: "GPSAreaInformation",
0x1D: "GPSDateStamp",
0x1E: "GPSDifferential",
0x0: GPSVersionID,
0x1: GPSLatitudeRef,
0x2: GPSLatitude,
0x3: GPSLongitudeRef,
0x4: GPSLongitude,
0x5: GPSAltitudeRef,
0x6: GPSAltitude,
0x7: GPSTimeStamp,
0x8: GPSSatelites,
0x9: GPSStatus,
0xA: GPSMeasureMode,
0xB: GPSDOP,
0xC: GPSSpeedRef,
0xD: GPSSpeed,
0xE: GPSTrackRef,
0xF: GPSTrack,
0x10: GPSImgDirectionRef,
0x11: GPSImgDirection,
0x12: GPSMapDatum,
0x13: GPSDestLatitudeRef,
0x14: GPSDestLatitude,
0x15: GPSDestLongitudeRef,
0x16: GPSDestLongitude,
0x17: GPSDestBearingRef,
0x18: GPSDestBearing,
0x19: GPSDestDistanceRef,
0x1A: GPSDestDistance,
0x1B: GPSProcessingMethod,
0x1C: GPSAreaInformation,
0x1D: GPSDateStamp,
0x1E: GPSDifferential,
}
var interopFields = map[uint16]FieldName{
/////////////////////////////////////
//// Interoperability sub-IFD ///////
/////////////////////////////////////
0x1: "InteroperabilityIndex",
0x1: InteroperabilityIndex,
}
var thumbnailFields = map[uint16]FieldName{
0x0201: ThumbJPEGInterchangeFormat,
0x0202: ThumbJPEGInterchangeFormatLength,
}

View File

@ -0,0 +1,62 @@
package exif
var regressExpected = map[string]map[FieldName]string{
"sample1.jpg": map[FieldName]string{
ExifIFDPointer: `{Id: 8769, Val: [216]}`,
YResolution: `{Id: 11B, Val: ["256/1"]}`,
PixelXDimension: `{Id: A002, Val: [500]}`,
FocalLengthIn35mmFilm: `{Id: A405, Val: [35]}`,
GPSLatitudeRef: `{Id: 1, Val: "N"}`,
FNumber: `{Id: 829D, Val: ["45/10"]}`,
GPSTimeStamp: `{Id: 7, Val: ["18/1","7/1","37/1"]}`,
SubSecTime: `{Id: 9290, Val: "63"}`,
ExifVersion: `{Id: 9000, Val: "0220"}`,
PixelYDimension: `{Id: A003, Val: [375]}`,
ExposureMode: `{Id: A402, Val: [0]}`,
Saturation: `{Id: A409, Val: [0]}`,
GPSInfoIFDPointer: `{Id: 8825, Val: [820]}`,
ExposureBiasValue: `{Id: 9204, Val: ["0/6"]}`,
MeteringMode: `{Id: 9207, Val: [3]}`,
Flash: `{Id: 9209, Val: [0]}`,
SubSecTimeOriginal: `{Id: 9291, Val: "63"}`,
FileSource: `{Id: A300, Val: ""}`,
GainControl: `{Id: A407, Val: [1]}`,
SubjectDistanceRange: `{Id: A40C, Val: [0]}`,
ThumbJPEGInterchangeFormatLength: `{Id: 202, Val: [4034]}`,
FlashpixVersion: `{Id: A000, Val: "0100"}`,
UserComment: `{Id: 9286, Val: "taken at basilica of chinese"}`,
CustomRendered: `{Id: A401, Val: [0]}`,
GPSVersionID: `{Id: 0, Val: [2,2,0,0]}`,
Orientation: `{Id: 112, Val: [1]}`,
DateTimeDigitized: `{Id: 9004, Val: "2003:11:23 18:07:37"}`,
RelatedSoundFile: `{Id: A004, Val: " "}`,
DigitalZoomRatio: `{Id: A404, Val: ["1/1"]}`,
Sharpness: `{Id: A40A, Val: [0]}`,
Model: `{Id: 110, Val: "NIKON D2H"}`,
CompressedBitsPerPixel: `{Id: 9102, Val: ["4/1"]}`,
FocalLength: `{Id: 920A, Val: ["2333/100"]}`,
SceneType: `{Id: A301, Val: ""}`,
DateTime: `{Id: 132, Val: "2005:07:02 10:38:28"}`,
ThumbJPEGInterchangeFormat: `{Id: 201, Val: [1088]}`,
Contrast: `{Id: A408, Val: [1]}`,
GPSLongitude: `{Id: 4, Val: ["116/1","23/1","27/1"]}`,
ExposureProgram: `{Id: 8822, Val: [3]}`,
XResolution: `{Id: 11A, Val: ["256/1"]}`,
SensingMethod: `{Id: A217, Val: [2]}`,
GPSLatitude: `{Id: 2, Val: ["39/1","54/1","56/1"]}`,
Make: `{Id: 10F, Val: "NIKON CORPORATION"}`,
ColorSpace: `{Id: A001, Val: [65535]}`,
Software: `{Id: 131, Val: "Opanda PowerExif"}`,
DateTimeOriginal: `{Id: 9003, Val: "2003:11:23 18:07:37"}`,
MaxApertureValue: `{Id: 9205, Val: ["3/1"]}`,
LightSource: `{Id: 9208, Val: [0]}`,
SceneCaptureType: `{Id: A406, Val: [0]}`,
GPSLongitudeRef: `{Id: 3, Val: "E"}`,
ResolutionUnit: `{Id: 128, Val: [2]}`,
SubSecTimeDigitized: `{Id: 9292, Val: "63"}`,
CFAPattern: `{Id: A302, Val: ""}`,
WhiteBalance: `{Id: A403, Val: [0]}`,
GPSDateStamp: `{Id: 1D, Val: "2003:11:23"}`,
ExposureTime: `{Id: 829A, Val: ["1/125"]}`,
},
}

View File

@ -0,0 +1,60 @@
package main
import (
"flag"
"fmt"
"log"
"os"
"github.com/rwcarlsen/goexif/exif"
"github.com/rwcarlsen/goexif/mknote"
"github.com/rwcarlsen/goexif/tiff"
)
var mnote = flag.Bool("mknote", false, "try to parse makernote data")
var thumb = flag.Bool("thumb", false, "dump thumbail data to stdout (for first listed image file)")
// main decodes EXIF data from each image file named on the command line,
// printing every tag, or (with -thumb) dumping the first image's embedded
// JPEG thumbnail to stdout.
func main() {
	flag.Parse()
	fnames := flag.Args()
	if *mnote {
		// Enable the optional Canon/Nikon makernote parsers.
		exif.RegisterParsers(mknote.All...)
	}
	for _, name := range fnames {
		f, err := os.Open(name)
		if err != nil {
			log.Printf("err on %v: %v", name, err)
			continue
		}
		x, err := exif.Decode(f)
		// Close the file as soon as Decode returns: x is not read from f
		// afterwards, and without this the handle leaked on every iteration
		// (and on the -thumb early return below).
		f.Close()
		if err != nil {
			log.Printf("err on %v: %v", name, err)
			continue
		}
		if *thumb {
			data, err := x.JpegThumbnail()
			if err != nil {
				log.Fatal("no thumbnail present")
			}
			if _, err := os.Stdout.Write(data); err != nil {
				log.Fatal(err)
			}
			// -thumb only dumps the first listed image.
			return
		}
		fmt.Printf("\n---- Image '%v' ----\n", name)
		x.Walk(Walker{})
	}
}
// Walker implements exif.Walker; Walk is invoked once per decoded tag.
type Walker struct{}

// Walk prints one tag per line as "name: <json value>". Errors from
// MarshalJSON are deliberately ignored (best-effort display); returning nil
// keeps the walk going over all remaining tags.
func (_ Walker) Walk(name exif.FieldName, tag *tiff.Tag) error {
	data, _ := tag.MarshalJSON()
	fmt.Printf("    %v: %v\n", name, string(data))
	return nil
}

View File

@ -0,0 +1,268 @@
package mknote
import "github.com/rwcarlsen/goexif/exif"
// Useful resources used in creating these tables:
// http://www.exiv2.org/makernote.html
// http://www.exiv2.org/tags-canon.html
// http://www.exiv2.org/tags-nikon.html
// Known Maker Note fields
const (
// common fields
ISOSpeed exif.FieldName = "ISOSpeed"
ColorMode = "ColorMode"
Quality = "Quality"
Sharpening = "Sharpening"
Focus = "Focus"
FlashSetting = "FlashSetting"
FlashDevice = "FlashDevice"
WhiteBalanceBias = "WhiteBalanceBias"
WB_RBLevels = "WB_RBLevels"
ProgramShift = "ProgramShift"
ExposureDiff = "ExposureDiff"
ISOSelection = "ISOSelection"
DataDump = "DataDump"
Preview = "Preview"
FlashComp = "FlashComp"
ISOSettings = "ISOSettings"
ImageBoundary = "ImageBoundary"
FlashExposureComp = "FlashExposureComp"
FlashBracketComp = "FlashBracketComp"
ExposureBracketComp = "ExposureBracketComp"
ImageProcessing = "ImageProcessing"
CropHiSpeed = "CropHiSpeed"
ExposureTuning = "ExposureTuning"
SerialNumber = "SerialNumber"
ImageAuthentication = "ImageAuthentication"
ActiveDLighting = "ActiveDLighting"
VignetteControl = "VignetteControl"
ImageAdjustment = "ImageAdjustment"
ToneComp = "ToneComp"
AuxiliaryLens = "AuxiliaryLens"
LensType = "LensType"
Lens = "Lens"
FocusDistance = "FocusDistance"
DigitalZoom = "DigitalZoom"
FlashMode = "FlashMode"
ShootingMode = "ShootingMode"
AutoBracketRelease = "AutoBracketRelease"
LensFStops = "LensFStops"
ContrastCurve = "ContrastCurve"
ColorHue = "ColorHue"
SceneMode = "SceneMode"
HueAdjustment = "HueAdjustment"
NEFCompression = "NEFCompression"
NoiseReduction = "NoiseReduction"
LinearizationTable = "LinearizationTable"
RawImageCenter = "RawImageCenter"
SensorPixelSize = "SensorPixelSize"
SceneAssist = "SceneAssist"
RetouchHistory = "RetouchHistory"
ImageDataSize = "ImageDataSize"
ImageCount = "ImageCount"
DeletedImageCount = "DeletedImageCount"
ShutterCount = "ShutterCount"
ImageOptimization = "ImageOptimization"
SaturationText = "SaturationText"
VariProgram = "VariProgram"
ImageStabilization = "ImageStabilization"
AFResponse = "AFResponse"
HighISONoiseReduction = "HighISONoiseReduction"
ToningEffect = "ToningEffect"
PrintIM = "PrintIM"
CaptureData = "CaptureData"
CaptureVersion = "CaptureVersion"
CaptureOffsets = "CaptureOffsets"
ScanIFD = "ScanIFD"
ICCProfile = "ICCProfile"
CaptureOutput = "CaptureOutput"
Panorama = "Panorama"
ImageType = "ImageType"
FirmwareVersion = "FirmwareVersion"
FileNumber = "FileNumber"
OwnerName = "OwnerName"
CameraInfo = "CameraInfo"
CustomFunctions = "CustomFunctions"
ModelID = "ModelID"
PictureInfo = "PictureInfo"
ThumbnailImageValidArea = "ThumbnailImageValidArea"
SerialNumberFormat = "SerialNumberFormat"
SuperMacro = "SuperMacro"
OriginalDecisionDataOffset = "OriginalDecisionDataOffset"
WhiteBalanceTable = "WhiteBalanceTable"
LensModel = "LensModel"
InternalSerialNumber = "InternalSerialNumber"
DustRemovalData = "DustRemovalData"
ProcessingInfo = "ProcessingInfo"
MeasuredColor = "MeasuredColor"
VRDOffset = "VRDOffset"
SensorInfo = "SensorInfo"
ColorData = "ColorData"
// Nikon-specific fields
Nikon_Version = "Nikon.Version"
Nikon_WhiteBalance = "Nikon.WhiteBalance"
Nikon_ColorSpace = "Nikon.ColorSpace"
Nikon_LightSource = "Nikon.LightSource"
Nikon_Saturation = "Nikon_Saturation"
Nikon_ShotInfo = "Nikon.ShotInfo" // A sub-IFD
Nikon_VRInfo = "Nikon.VRInfo" // A sub-IFD
Nikon_PictureControl = "Nikon.PictureControl" // A sub-IFD
Nikon_WorldTime = "Nikon.WorldTime" // A sub-IFD
Nikon_ISOInfo = "Nikon.ISOInfo" // A sub-IFD
Nikon_AFInfo = "Nikon.AFInfo" // A sub-IFD
Nikon_ColorBalance = "Nikon.ColorBalance" // A sub-IFD
Nikon_LensData = "Nikon.LensData" // A sub-IFD
Nikon_SerialNO = "Nikon.SerialNO" // usually starts with "NO="
Nikon_FlashInfo = "Nikon.FlashInfo" // A sub-IFD
Nikon_MultiExposure = "Nikon.MultiExposure" // A sub-IFD
Nikon_AFInfo2 = "Nikon.AFInfo2" // A sub-IFD
Nikon_FileInfo = "Nikon.FileInfo" // A sub-IFD
Nikon_AFTune = "Nikon.AFTune" // A sub-IFD
Nikon3_0x000a = "Nikon3.0x000a"
Nikon3_0x009b = "Nikon3.0x009b"
Nikon3_0x009f = "Nikon3.0x009f"
Nikon3_0x00a3 = "Nikon3.0x00a3"
// Canon-specific fields
Canon_CameraSettings = "Canon.CameraSettings" // A sub-IFD
Canon_ShotInfo = "Canon.ShotInfo" // A sub-IFD
Canon_AFInfo = "Canon.AFInfo"
Canon_0x0000 = "Canon.0x0000"
Canon_0x0003 = "Canon.0x0003"
Canon_0x00b5 = "Canon.0x00b5"
Canon_0x00c0 = "Canon.0x00c0"
Canon_0x00c1 = "Canon.0x00c1"
)
// makerNoteCanonFields maps Canon makernote IFD tag ids to their field
// names. Tag meanings follow the exiv2 Canon tables
// (http://www.exiv2.org/tags-canon.html).
var makerNoteCanonFields = map[uint16]exif.FieldName{
	0x0000: Canon_0x0000,
	0x0001: Canon_CameraSettings,
	0x0002: exif.FocalLength, // reuses the standard EXIF field name
	0x0003: Canon_0x0003,
	0x0004: Canon_ShotInfo,
	0x0005: Panorama,
	0x0006: ImageType,
	0x0007: FirmwareVersion,
	0x0008: FileNumber,
	0x0009: OwnerName,
	0x000c: SerialNumber,
	0x000d: CameraInfo,
	0x000f: CustomFunctions,
	0x0010: ModelID,
	0x0012: PictureInfo,
	0x0013: ThumbnailImageValidArea,
	0x0015: SerialNumberFormat,
	0x001a: SuperMacro,
	0x0026: Canon_AFInfo,
	0x0083: OriginalDecisionDataOffset,
	0x00a4: WhiteBalanceTable,
	0x0095: LensModel,
	0x0096: InternalSerialNumber,
	0x0097: DustRemovalData,
	0x0099: CustomFunctions, // NOTE(review): same name as 0x000f — confirm against the exiv2 table
	0x00a0: ProcessingInfo,
	0x00aa: MeasuredColor,
	0x00b4: exif.ColorSpace, // reuses the standard EXIF field name
	0x00b5: Canon_0x00b5,
	0x00c0: Canon_0x00c0,
	0x00c1: Canon_0x00c1,
	0x00d0: VRDOffset,
	0x00e0: SensorInfo,
	0x4001: ColorData,
}
// Nikon version 3 Maker Notes fields (used by E5400, SQ, D2H, D70, and newer)
var makerNoteNikon3Fields = map[uint16]exif.FieldName{
0x0001: Nikon_Version,
0x0002: ISOSpeed,
0x0003: ColorMode,
0x0004: Quality,
0x0005: Nikon_WhiteBalance,
0x0006: Sharpening,
0x0007: Focus,
0x0008: FlashSetting,
0x0009: FlashDevice,
0x000a: Nikon3_0x000a,
0x000b: WhiteBalanceBias,
0x000c: WB_RBLevels,
0x000d: ProgramShift,
0x000e: ExposureDiff,
0x000f: ISOSelection,
0x0010: DataDump,
0x0011: Preview,
0x0012: FlashComp,
0x0013: ISOSettings,
0x0016: ImageBoundary,
0x0017: FlashExposureComp,
0x0018: FlashBracketComp,
0x0019: ExposureBracketComp,
0x001a: ImageProcessing,
0x001b: CropHiSpeed,
0x001c: ExposureTuning,
0x001d: SerialNumber,
0x001e: Nikon_ColorSpace,
0x001f: Nikon_VRInfo,
0x0020: ImageAuthentication,
0x0022: ActiveDLighting,
0x0023: Nikon_PictureControl,
0x0024: Nikon_WorldTime,
0x0025: Nikon_ISOInfo,
0x002a: VignetteControl,
0x0080: ImageAdjustment,
0x0081: ToneComp,
0x0082: AuxiliaryLens,
0x0083: LensType,
0x0084: Lens,
0x0085: FocusDistance,
0x0086: DigitalZoom,
0x0087: FlashMode,
0x0088: Nikon_AFInfo,
0x0089: ShootingMode,
0x008a: AutoBracketRelease,
0x008b: LensFStops,
0x008c: ContrastCurve,
0x008d: ColorHue,
0x008f: SceneMode,
0x0090: Nikon_LightSource,
0x0091: Nikon_ShotInfo,
0x0092: HueAdjustment,
0x0093: NEFCompression,
0x0094: Nikon_Saturation,
0x0095: NoiseReduction,
0x0096: LinearizationTable,
0x0097: Nikon_ColorBalance,
0x0098: Nikon_LensData,
0x0099: RawImageCenter,
0x009a: SensorPixelSize,
0x009b: Nikon3_0x009b,
0x009c: SceneAssist,
0x009e: RetouchHistory,
0x009f: Nikon3_0x009f,
0x00a0: Nikon_SerialNO,
0x00a2: ImageDataSize,
0x00a3: Nikon3_0x00a3,
0x00a5: ImageCount,
0x00a6: DeletedImageCount,
0x00a7: ShutterCount,
0x00a8: Nikon_FlashInfo,
0x00a9: ImageOptimization,
0x00aa: SaturationText,
0x00ab: VariProgram,
0x00ac: ImageStabilization,
0x00ad: AFResponse,
0x00b0: Nikon_MultiExposure,
0x00b1: HighISONoiseReduction,
0x00b3: ToningEffect,
0x00b7: Nikon_AFInfo2,
0x00b8: Nikon_FileInfo,
0x00b9: Nikon_AFTune,
0x0e00: PrintIM,
0x0e01: CaptureData,
0x0e09: CaptureVersion,
0x0e0e: CaptureOffsets,
0x0e10: ScanIFD,
0x0e1d: ICCProfile,
0x0e1e: CaptureOutput,
}

View File

@ -0,0 +1,70 @@
// Package mknote provides makernote parsers that can be used with goexif/exif.
package mknote
import (
"bytes"
"github.com/rwcarlsen/goexif/exif"
"github.com/rwcarlsen/goexif/tiff"
)
var (
	// Canon is an exif.Parser for Canon makernote data.
	Canon = &canon{}
	// NikonV3 is an exif.Parser for Nikon (version 3) makernote data.
	NikonV3 = &nikonV3{}
	// All is a list of all available makernote parsers, suitable for
	// passing to exif.RegisterParsers.
	All = []exif.Parser{Canon, NikonV3}
)
// canon parses Canon-style makernote data.
type canon struct{}

// Parse decodes all Canon makernote data found in x and adds it to x.
// It returns nil (no-op) when there is no MakerNote tag, no Make tag, or
// the Make is not "Canon"; a missing makernote is not an error.
func (_ *canon) Parse(x *exif.Exif) error {
	m, err := x.Get(exif.MakerNote)
	if err != nil {
		return nil
	}
	mk, err := x.Get(exif.Make)
	if err != nil {
		return nil
	}
	// NOTE(review): StringVal panics if the Make tag is not ASCII-typed —
	// presumably always true for real files; confirm.
	if mk.StringVal() != "Canon" {
		return nil
	}
	// Canon notes are a single IFD directory with no header.
	// Reader offsets need to be w.r.t. the original tiff structure.
	// Prepend ValOffset zero bytes and then Seek past them so that absolute
	// ReadAt offsets inside the note line up with the original file layout.
	buf := bytes.NewReader(append(make([]byte, m.ValOffset), m.Val...))
	// Seek error ignored: seeking a bytes.Reader to an in-range offset
	// cannot fail.
	buf.Seek(int64(m.ValOffset), 0)
	mkNotesDir, _, err := tiff.DecodeDir(buf, x.Tiff.Order)
	if err != nil {
		return err
	}
	x.LoadTags(mkNotesDir, makerNoteCanonFields, false)
	return nil
}
// nikonV3 parses Nikon version-3 makernote data.
type nikonV3 struct{}

// Parse decodes all Nikon makernote data found in x and adds it to x.
// It returns nil (no-op) when there is no MakerNote tag or the payload is
// not a Nikon v3 note; a missing or foreign makernote is not an error.
func (_ *nikonV3) Parse(x *exif.Exif) error {
	m, err := x.Get(exif.MakerNote)
	if err != nil {
		return nil
	}
	// Guard the slicing below: the original indexed m.Val[:6] and m.Val[10:]
	// unconditionally, which panics with "index out of range" on makernote
	// payloads shorter than 10 bytes.
	if len(m.Val) < 10 || !bytes.Equal(m.Val[:6], []byte("Nikon\000")) {
		return nil
	}
	// Nikon v3 maker note is a self-contained IFD (offsets are relative
	// to the start of the maker note); the first 10 bytes are the
	// "Nikon\0" signature plus version/padding.
	mkNotes, err := tiff.Decode(bytes.NewReader(m.Val[10:]))
	if err != nil {
		return err
	}
	if len(mkNotes.Dirs) == 0 {
		return nil
	}
	x.LoadTags(mkNotes.Dirs[0], makerNoteNikon3Fields, false)
	return nil
}

View File

@ -5,16 +5,19 @@ import (
"encoding/binary"
"errors"
"fmt"
"io"
"math/big"
"strings"
"unicode"
"unicode/utf8"
)
type FormatType int
// TypeCategory specifies the Go type equivalent used to represent the basic
// tiff data types.
type TypeCategory int
const (
IntVal FormatType = iota
IntVal TypeCategory = iota
FloatVal
RatVal
StringVal
@ -22,32 +25,55 @@ const (
OtherVal
)
var fmtSize = map[uint16]uint32{
1: 1,
2: 1,
3: 2,
4: 4,
5: 8,
6: 1,
7: 1,
8: 2,
9: 4,
10: 8,
11: 4,
12: 8,
// DataType represents the basic tiff tag data types.
type DataType uint16
const (
DTByte DataType = 1
DTAscii = 2
DTShort = 3
DTLong = 4
DTRational = 5
DTSByte = 6
DTUndefined = 7
DTSShort = 8
DTSLong = 9
DTSRational = 10
DTFloat = 11
DTDouble = 12
)
// typeSize specifies the size in bytes of each type.
var typeSize = map[DataType]uint32{
DTByte: 1,
DTAscii: 1,
DTShort: 2,
DTLong: 4,
DTRational: 8,
DTSByte: 1,
DTUndefined: 1,
DTSShort: 2,
DTSLong: 4,
DTSRational: 8,
DTFloat: 4,
DTDouble: 8,
}
// Tag reflects the parsed content of a tiff IFD tag.
type Tag struct {
// Id is the 2-byte tiff tag identifier
// Id is the 2-byte tiff tag identifier.
Id uint16
// Fmt is an integer (1 through 12) indicating the tag value's format.
Fmt uint16
// Ncomp is the number of type Fmt stored in the tag's value (i.e. the tag's
// value is an array of type Fmt and length Ncomp).
Ncomp uint32
// Type is an integer (1 through 12) indicating the tag value's data type.
Type DataType
// Count is the number of type Type stored in the tag's value (i.e. the
// tag's value is an array of type Type and length Count).
Count uint32
// Val holds the bytes that represent the tag's value.
Val []byte
// ValOffset holds byte offset of the tag value w.r.t. the beginning of the
// reader it was decoded from. Zero if the tag value fit inside the offset
// field.
ValOffset uint32
order binary.ByteOrder
@ -57,10 +83,10 @@ type Tag struct {
strVal string
}
// DecodeTag parses a tiff-encoded IFD tag from r and returns Tag object. The
// DecodeTag parses a tiff-encoded IFD tag from r and returns a Tag object. The
// first read from r should be the first byte of the tag. ReadAt offsets should
// be relative to the beginning of the tiff structure (not relative to the
// beginning of the tag).
// generally be relative to the beginning of the tiff structure (not relative
// to the beginning of the tag).
func DecodeTag(r ReadAtReader, order binary.ByteOrder) (*Tag, error) {
t := new(Tag)
t.order = order
@ -70,35 +96,32 @@ func DecodeTag(r ReadAtReader, order binary.ByteOrder) (*Tag, error) {
return nil, errors.New("tiff: tag id read failed: " + err.Error())
}
err = binary.Read(r, order, &t.Fmt)
err = binary.Read(r, order, &t.Type)
if err != nil {
return nil, errors.New("tiff: tag format read failed: " + err.Error())
return nil, errors.New("tiff: tag type read failed: " + err.Error())
}
err = binary.Read(r, order, &t.Ncomp)
err = binary.Read(r, order, &t.Count)
if err != nil {
return nil, errors.New("tiff: tag component count read failed: " + err.Error())
}
valLen := fmtSize[t.Fmt] * t.Ncomp
var offset uint32
valLen := typeSize[t.Type] * t.Count
if valLen > 4 {
binary.Read(r, order, &offset)
binary.Read(r, order, &t.ValOffset)
t.Val = make([]byte, valLen)
n, err := r.ReadAt(t.Val, int64(offset))
n, err := r.ReadAt(t.Val, int64(t.ValOffset))
if n != int(valLen) || err != nil {
return nil, errors.New("tiff: tag value read failed: " + err.Error())
return t, errors.New("tiff: tag value read failed: " + err.Error())
}
} else {
val := make([]byte, valLen)
n, err := r.Read(val)
if err != nil || n != int(valLen) {
return nil, errors.New("tiff: tag offset read failed: " + err.Error())
if _, err = io.ReadFull(r, val); err != nil {
return t, errors.New("tiff: tag offset read failed: " + err.Error())
}
n, err = r.Read(make([]byte, 4-valLen))
if err != nil || n != 4-int(valLen) {
return nil, errors.New("tiff: tag offset read failed: " + err.Error())
// ignore padding.
if _, err = io.ReadFull(r, make([]byte, 4-valLen)); err != nil {
return t, errors.New("tiff: tag offset read failed: " + err.Error())
}
t.Val = val
@ -112,62 +135,62 @@ func DecodeTag(r ReadAtReader, order binary.ByteOrder) (*Tag, error) {
func (t *Tag) convertVals() {
r := bytes.NewReader(t.Val)
switch t.Fmt {
case 2: // ascii string
switch t.Type {
case DTAscii:
if len(t.Val) > 0 {
t.strVal = string(t.Val[:len(t.Val)-1])
t.strVal = string(t.Val[:len(t.Val)-1]) // ignore the last byte (NULL).
}
case 1:
case DTByte:
var v uint8
t.intVals = make([]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
t.intVals = make([]int64, int(t.Count))
for i := range t.intVals {
err := binary.Read(r, t.order, &v)
panicOn(err)
t.intVals[i] = int64(v)
}
case 3:
case DTShort:
var v uint16
t.intVals = make([]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
t.intVals = make([]int64, int(t.Count))
for i := range t.intVals {
err := binary.Read(r, t.order, &v)
panicOn(err)
t.intVals[i] = int64(v)
}
case 4:
case DTLong:
var v uint32
t.intVals = make([]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
t.intVals = make([]int64, int(t.Count))
for i := range t.intVals {
err := binary.Read(r, t.order, &v)
panicOn(err)
t.intVals[i] = int64(v)
}
case 6:
case DTSByte:
var v int8
t.intVals = make([]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
t.intVals = make([]int64, int(t.Count))
for i := range t.intVals {
err := binary.Read(r, t.order, &v)
panicOn(err)
t.intVals[i] = int64(v)
}
case 8:
case DTSShort:
var v int16
t.intVals = make([]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
t.intVals = make([]int64, int(t.Count))
for i := range t.intVals {
err := binary.Read(r, t.order, &v)
panicOn(err)
t.intVals[i] = int64(v)
}
case 9:
case DTSLong:
var v int32
t.intVals = make([]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
t.intVals = make([]int64, int(t.Count))
for i := range t.intVals {
err := binary.Read(r, t.order, &v)
panicOn(err)
t.intVals[i] = int64(v)
}
case 5: // unsigned rational
t.ratVals = make([][]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
case DTRational:
t.ratVals = make([][]int64, int(t.Count))
for i := range t.ratVals {
var n, d uint32
err := binary.Read(r, t.order, &n)
panicOn(err)
@ -175,9 +198,9 @@ func (t *Tag) convertVals() {
panicOn(err)
t.ratVals[i] = []int64{int64(n), int64(d)}
}
case 10: // signed rational
t.ratVals = make([][]int64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
case DTSRational:
t.ratVals = make([][]int64, int(t.Count))
for i := range t.ratVals {
var n, d int32
err := binary.Read(r, t.order, &n)
panicOn(err)
@ -185,17 +208,17 @@ func (t *Tag) convertVals() {
panicOn(err)
t.ratVals[i] = []int64{int64(n), int64(d)}
}
case 11: // float32
t.floatVals = make([]float64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
case DTFloat: // float32
t.floatVals = make([]float64, int(t.Count))
for i := range t.floatVals {
var v float32
err := binary.Read(r, t.order, &v)
panicOn(err)
t.floatVals[i] = float64(v)
}
case 12: // float64 (double)
t.floatVals = make([]float64, int(t.Ncomp))
for i := 0; i < int(t.Ncomp); i++ {
case DTDouble:
t.floatVals = make([]float64, int(t.Count))
for i := range t.floatVals {
var u float64
err := binary.Read(r, t.order, &u)
panicOn(err)
@ -204,65 +227,65 @@ func (t *Tag) convertVals() {
}
}
// Format returns a value indicating which method can be called to retrieve the
// TypeCategory returns a value indicating which method can be called to retrieve the
// tag's value properly typed (e.g. integer, rational, etc.).
func (t *Tag) Format() FormatType {
switch t.Fmt {
case 1, 3, 4, 6, 8, 9:
func (t *Tag) TypeCategory() TypeCategory {
switch t.Type {
case DTByte, DTShort, DTLong, DTSByte, DTSShort, DTSLong:
return IntVal
case 5, 10:
case DTRational, DTSRational:
return RatVal
case 11, 12:
case DTFloat, DTDouble:
return FloatVal
case 2:
case DTAscii:
return StringVal
case 7:
case DTUndefined:
return UndefVal
}
return OtherVal
}
// Rat returns the tag's i'th value as a rational number. It panics if the tag format
// is not RatVal, if the denominator is zero, or if the tag has no i'th
// component. If a denominator could be zero, use Rat2.
// Rat returns the tag's i'th value as a rational number. It panics if the tag
// TypeCategory is not RatVal, if the denominator is zero, or if the tag has no
// i'th component. If a denominator could be zero, use Rat2.
func (t *Tag) Rat(i int) *big.Rat {
n, d := t.Rat2(i)
return big.NewRat(n, d)
}
// Rat2 returns the tag's i'th value as a rational number represented by a
// numerator-denominator pair. It panics if the tag format is not RatVal
// numerator-denominator pair. It panics if the tag TypeCategory is not RatVal
// or if the tag value has no i'th component.
func (t *Tag) Rat2(i int) (num, den int64) {
if t.Format() != RatVal {
panic("Tag format is not 'rational'")
if t.TypeCategory() != RatVal {
panic("Tag type category is not 'rational'")
}
return t.ratVals[i][0], t.ratVals[i][1]
}
// Int returns the tag's i'th value as an integer. It panics if the tag format is not
// IntVal or if the tag value has no i'th component.
// Int returns the tag's i'th value as an integer. It panics if the tag
// TypeCategory is not IntVal or if the tag value has no i'th component.
func (t *Tag) Int(i int) int64 {
if t.Format() != IntVal {
panic("Tag format is not 'int'")
if t.TypeCategory() != IntVal {
panic("Tag type category is not 'int'")
}
return t.intVals[i]
}
// Float returns the tag's i'th value as a float. It panics if the tag format is not
// FloatVal or if the tag value has no i'th component.
// Float returns the tag's i'th value as a float. It panics if the tag
// TypeCategory is not FloatVal or if the tag value has no i'th component.
func (t *Tag) Float(i int) float64 {
if t.Format() != FloatVal {
panic("Tag format is not 'float'")
if t.TypeCategory() != FloatVal {
panic("Tag type category is not 'float'")
}
return t.floatVals[i]
}
// StringVal returns the tag's value as a string. It panics if the tag
// format is not StringVal
// TypeCategory is not StringVal.
func (t *Tag) StringVal() string {
if t.Format() != StringVal {
panic("Tag format is not 'ascii string'")
if t.TypeCategory() != StringVal {
panic("Tag type category is not 'ascii string'")
}
return t.strVal
}
@ -276,17 +299,17 @@ func (t *Tag) String() string {
}
func (t *Tag) MarshalJSON() ([]byte, error) {
f := t.Format()
f := t.TypeCategory()
switch f {
case StringVal, UndefVal:
return nullString(t.Val), nil
case OtherVal:
panic(fmt.Sprintf("Unhandled type Fmt=%v", t.Fmt))
panic(fmt.Sprintf("Unhandled tag type=%v", t.Type))
}
rv := []string{}
for i := 0; i < int(t.Ncomp); i++ {
for i := 0; i < int(t.Count); i++ {
switch f {
case RatVal:
n, d := t.Rat2(i)

View File

@ -1,10 +1,12 @@
// Package tiff implements TIFF decoding as defined in TIFF 6.0 specification.
// Package tiff implements TIFF decoding as defined in TIFF 6.0 specification at
// http://partners.adobe.com/public/developer/en/tiff/TIFF6.pdf
package tiff
import (
"bytes"
"encoding/binary"
"errors"
"fmt"
"io"
"io/ioutil"
)
@ -15,7 +17,7 @@ type ReadAtReader interface {
io.ReaderAt
}
// Tiff provides access to decoded tiff data.
// Tiff provides access to a decoded tiff data structure.
type Tiff struct {
// Dirs is an ordered slice of the tiff's Image File Directories (IFDs).
// The IFD at index 0 is IFD0.
@ -24,10 +26,10 @@ type Tiff struct {
Order binary.ByteOrder
}
// Decode parses tiff-encoded data from r and returns a Tiff that reflects the
// structure and content of the tiff data. The first read from r should be the
// first byte of the tiff-encoded data (not necessarily the first byte of an
// os.File object).
// Decode parses tiff-encoded data from r and returns a Tiff struct that
// reflects the structure and content of the tiff data. The first read from r
// should be the first byte of the tiff-encoded data and not necessarily the
// first byte of an os.File object.
func Decode(r io.Reader) (*Tiff, error) {
data, err := ioutil.ReadAll(r)
if err != nil {
@ -39,23 +41,21 @@ func Decode(r io.Reader) (*Tiff, error) {
// read byte order
bo := make([]byte, 2)
n, err := buf.Read(bo)
if n < len(bo) || err != nil {
if _, err = io.ReadFull(buf, bo); err != nil {
return nil, errors.New("tiff: could not read tiff byte order")
}
if string(bo) == "II" {
t.Order = binary.LittleEndian
} else if string(bo) == "MM" {
t.Order = binary.BigEndian
} else {
if string(bo) == "II" {
t.Order = binary.LittleEndian
} else if string(bo) == "MM" {
t.Order = binary.BigEndian
} else {
return nil, errors.New("tiff: could not read tiff byte order")
}
return nil, errors.New("tiff: could not read tiff byte order")
}
// check for special tiff marker
var sp int16
err = binary.Read(buf, t.Order, &sp)
if err != nil || 0x002A != sp {
if err != nil || 42 != sp {
return nil, errors.New("tiff: could not find special tiff marker")
}
@ -91,22 +91,24 @@ func Decode(r io.Reader) (*Tiff, error) {
}
func (tf *Tiff) String() string {
s := "Tiff{"
var buf bytes.Buffer
fmt.Fprint(&buf, "Tiff{")
for _, d := range tf.Dirs {
s += d.String() + ", "
fmt.Fprintf(&buf, "%s, ", d.String())
}
return s + "}"
fmt.Fprintf(&buf, "}")
return buf.String()
}
// Dir reflects the parsed content of a tiff Image File Directory (IFD).
// Dir provides access to the parsed content of a tiff Image File Directory (IFD).
type Dir struct {
Tags []*Tag
}
// DecodeDir parses a tiff-encoded IFD from r and returns a Dir object. offset
// is the offset to the next IFD. The first read from r should be at the first
// byte of the IFD. ReadAt offsets should be relative to the beginning of the
// tiff structure (not relative to the beginning of the IFD).
// byte of the IFD. ReadAt offsets should generally be relative to the
// beginning of the tiff structure (not relative to the beginning of the IFD).
func DecodeDir(r ReadAtReader, order binary.ByteOrder) (d *Dir, offset int32, err error) {
d = new(Dir)
@ -114,7 +116,7 @@ func DecodeDir(r ReadAtReader, order binary.ByteOrder) (d *Dir, offset int32, er
var nTags int16
err = binary.Read(r, order, &nTags)
if err != nil {
return nil, 0, errors.New("tiff: falied to read IFD tag count: " + err.Error())
return nil, 0, errors.New("tiff: failed to read IFD tag count: " + err.Error())
}
// load tags

View File

@ -4,10 +4,14 @@ import (
"bytes"
"encoding/binary"
"encoding/hex"
"flag"
"os"
"path/filepath"
"testing"
)
var dataDir = flag.String("test_data_dir", ".", "Directory where the data files for testing are located")
type input struct {
tgId string
tpe string
@ -17,10 +21,10 @@ type input struct {
}
type output struct {
id uint16
format uint16
count uint32
val []byte
id uint16
typ DataType
count uint32
val []byte
}
type tagTest struct {
@ -38,97 +42,97 @@ var set1 = []tagTest{
// {"TgId", "TYPE", "N-VALUES", "OFFSET--", "VAL..."},
input{"0003", "0002", "00000002", "11000000", ""},
input{"0300", "0200", "02000000", "11000000", ""},
output{0x0003, 0x0002, 0x0002, []byte{0x11, 0x00}},
output{0x0003, DataType(0x0002), 0x0002, []byte{0x11, 0x00}},
},
tagTest{
input{"0001", "0002", "00000006", "00000012", "111213141516"},
input{"0100", "0200", "06000000", "12000000", "111213141516"},
output{0x0001, 0x0002, 0x0006, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16}},
output{0x0001, DataType(0x0002), 0x0006, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16}},
},
//////////// int (1-byte) type ////////////////
tagTest{
input{"0001", "0001", "00000001", "11000000", ""},
input{"0100", "0100", "01000000", "11000000", ""},
output{0x0001, 0x0001, 0x0001, []byte{0x11}},
output{0x0001, DataType(0x0001), 0x0001, []byte{0x11}},
},
tagTest{
input{"0001", "0001", "00000005", "00000010", "1112131415"},
input{"0100", "0100", "05000000", "10000000", "1112131415"},
output{0x0001, 0x0001, 0x0005, []byte{0x11, 0x12, 0x13, 0x14, 0x15}},
output{0x0001, DataType(0x0001), 0x0005, []byte{0x11, 0x12, 0x13, 0x14, 0x15}},
},
tagTest{
input{"0001", "0006", "00000001", "11000000", ""},
input{"0100", "0600", "01000000", "11000000", ""},
output{0x0001, 0x0006, 0x0001, []byte{0x11}},
output{0x0001, DataType(0x0006), 0x0001, []byte{0x11}},
},
tagTest{
input{"0001", "0006", "00000005", "00000010", "1112131415"},
input{"0100", "0600", "05000000", "10000000", "1112131415"},
output{0x0001, 0x0006, 0x0005, []byte{0x11, 0x12, 0x13, 0x14, 0x15}},
output{0x0001, DataType(0x0006), 0x0005, []byte{0x11, 0x12, 0x13, 0x14, 0x15}},
},
//////////// int (2-byte) types ////////////////
tagTest{
input{"0001", "0003", "00000002", "11111212", ""},
input{"0100", "0300", "02000000", "11111212", ""},
output{0x0001, 0x0003, 0x0002, []byte{0x11, 0x11, 0x12, 0x12}},
output{0x0001, DataType(0x0003), 0x0002, []byte{0x11, 0x11, 0x12, 0x12}},
},
tagTest{
input{"0001", "0003", "00000003", "00000010", "111213141516"},
input{"0100", "0300", "03000000", "10000000", "111213141516"},
output{0x0001, 0x0003, 0x0003, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16}},
output{0x0001, DataType(0x0003), 0x0003, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16}},
},
tagTest{
input{"0001", "0008", "00000001", "11120000", ""},
input{"0100", "0800", "01000000", "11120000", ""},
output{0x0001, 0x0008, 0x0001, []byte{0x11, 0x12}},
output{0x0001, DataType(0x0008), 0x0001, []byte{0x11, 0x12}},
},
tagTest{
input{"0001", "0008", "00000003", "00000100", "111213141516"},
input{"0100", "0800", "03000000", "00100000", "111213141516"},
output{0x0001, 0x0008, 0x0003, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16}},
output{0x0001, DataType(0x0008), 0x0003, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16}},
},
//////////// int (4-byte) types ////////////////
tagTest{
input{"0001", "0004", "00000001", "11121314", ""},
input{"0100", "0400", "01000000", "11121314", ""},
output{0x0001, 0x0004, 0x0001, []byte{0x11, 0x12, 0x13, 0x14}},
output{0x0001, DataType(0x0004), 0x0001, []byte{0x11, 0x12, 0x13, 0x14}},
},
tagTest{
input{"0001", "0004", "00000002", "00000010", "1112131415161718"},
input{"0100", "0400", "02000000", "10000000", "1112131415161718"},
output{0x0001, 0x0004, 0x0002, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}},
output{0x0001, DataType(0x0004), 0x0002, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}},
},
tagTest{
input{"0001", "0009", "00000001", "11121314", ""},
input{"0100", "0900", "01000000", "11121314", ""},
output{0x0001, 0x0009, 0x0001, []byte{0x11, 0x12, 0x13, 0x14}},
output{0x0001, DataType(0x0009), 0x0001, []byte{0x11, 0x12, 0x13, 0x14}},
},
tagTest{
input{"0001", "0009", "00000002", "00000011", "1112131415161819"},
input{"0100", "0900", "02000000", "11000000", "1112131415161819"},
output{0x0001, 0x0009, 0x0002, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19}},
output{0x0001, DataType(0x0009), 0x0002, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19}},
},
//////////// rational types ////////////////////
tagTest{
input{"0001", "0005", "00000001", "00000010", "1112131415161718"},
input{"0100", "0500", "01000000", "10000000", "1112131415161718"},
output{0x0001, 0x0005, 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}},
output{0x0001, DataType(0x0005), 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}},
},
tagTest{
input{"0001", "000A", "00000001", "00000011", "1112131415161819"},
input{"0100", "0A00", "01000000", "11000000", "1112131415161819"},
output{0x0001, 0x000A, 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19}},
output{0x0001, DataType(0x000A), 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19}},
},
//////////// float types ///////////////////////
tagTest{
input{"0001", "0005", "00000001", "00000010", "1112131415161718"},
input{"0100", "0500", "01000000", "10000000", "1112131415161718"},
output{0x0001, 0x0005, 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}},
output{0x0001, DataType(0x0005), 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}},
},
tagTest{
input{"0101", "000A", "00000001", "00000011", "1112131415161819"},
input{"0101", "0A00", "01000000", "11000000", "1112131415161819"},
output{0x0101, 0x000A, 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19}},
output{0x0101, DataType(0x000A), 0x0001, []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19}},
},
}
@ -151,11 +155,11 @@ func testSingle(t *testing.T, order binary.ByteOrder, in input, out output, i in
if tg.Id != out.id {
t.Errorf("(%v) tag %v id decode: expected %v, got %v", order, i, out.id, tg.Id)
}
if tg.Fmt != out.format {
t.Errorf("(%v) tag %v format decode: expected %v, got %v", order, i, out.format, tg.Fmt)
if tg.Type != out.typ {
t.Errorf("(%v) tag %v type decode: expected %v, got %v", order, i, out.typ, tg.Type)
}
if tg.Ncomp != out.count {
t.Errorf("(%v) tag %v N-components decode: expected %v, got %v", order, i, out.count, tg.Ncomp)
if tg.Count != out.count {
t.Errorf("(%v) tag %v component count decode: expected %v, got %v", order, i, out.count, tg.Count)
}
if !bytes.Equal(tg.Val, out.val) {
t.Errorf("(%v) tag %v value decode: expected %v, got %v", order, i, out.val, tg.Val)
@ -188,7 +192,7 @@ func buildInput(in input, order binary.ByteOrder) []byte {
}
func TestDecode(t *testing.T) {
name := "sample1.tif"
name := filepath.Join(*dataDir, "sample1.tif")
f, err := os.Open(name)
if err != nil {
t.Fatalf("%v\n", err)
@ -212,7 +216,7 @@ func TestDecodeTag_blob(t *testing.T) {
t.Logf("tag: %v+\n", tg)
n, d := tg.Rat2(0)
t.Logf("tag rat val: %v\n", n, d)
t.Logf("tag rat val: %v/%v\n", n, d)
}
func data() []byte {