parent ef275f97f4
commit e4323b6047
@@ -1,14 +1,32 @@
package api

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"
)

const (
	// DefaultContentType is the content type header the API expects
	DefaultContentType string = "application/json; charset=utf-8"
)

type V1Client struct {
	token            string
	connectionString string
	client           http.Client
}

func New(connection, token string) *V1Client {
	c := http.Client{}
	return &V1Client{
		client:           c,
		connectionString: connection,
		token:            token,
	}
}

func (v1 *V1Client) SetConnectionString(connection string) {

@@ -19,14 +37,48 @@ func (v1 *V1Client) SetToken(token string) {
	v1.token = token
}

// GET is the V1 GET function. By design it will check globally for all non-200
// responses and return an error if a non-200 is encountered.
func (v1 *V1Client) GET(format string, a ...interface{}) (*http.Response, error) {
-	url := v1.Endpoint(fmt.Sprintf(format, a))
-	return http.Get(url)
+	str := fmt.Sprintf(format, a...)
+	//logger.Debug("GET [%s]", str)
+	url := v1.EndpointStr(str)
+	buffer := &bytes.Buffer{}
+	req, err := http.NewRequest("GET", url, buffer)
+	if err != nil {
+		return nil, fmt.Errorf("unable to generate new request: %v", err)
+	}
+	req.Header.Set("Content-Type", DefaultContentType)
+	req.Header.Set("X-Session-Id", v1.token)
+	resp, err := http.DefaultClient.Do(req)
+
+	if err != nil {
+		return resp, err
+	}
+	if resp.StatusCode != 200 {
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			return resp, fmt.Errorf("[%d]: unable to read body: %v", resp.StatusCode, err)
+		}
+		return resp, fmt.Errorf("[%d]: %s", resp.StatusCode, body)
+	}
+	return resp, nil
}

-func (v1 *V1Client) Endpoint(str string) string {
+func (v1 *V1Client) EndpointStr(str string) string {
	if strings.HasPrefix(str, "/") {
-		return fmt.Sprintf("%s%s", v1.connectionString, str)
+		str = fmt.Sprintf("%s%s", v1.connectionString, str)
	} else {
		str = fmt.Sprintf("%s/%s", v1.connectionString, str)
	}
-	return fmt.Sprintf("%s/%s", v1.connectionString, str)
+	return str
}

+func (v1 *V1Client) EndpointURL(str string) (*url.URL, error) {
+	if strings.HasPrefix(str, "/") {
+		str = fmt.Sprintf("%s%s", v1.connectionString, str)
+	} else {
+		str = fmt.Sprintf("%s/%s", v1.connectionString, str)
+	}
+	return url.Parse(str)
+}
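For orientation, a minimal sketch of driving the new V1Client directly, assuming the api package is importable as github.com/kris-nova/client-go/api; the photo path is a guess, not taken from the diff, and the token value is the one used in the test script later in this commit:

package main

import (
	"fmt"
	"io/ioutil"

	"github.com/kris-nova/client-go/api" // assumed import path for the package above
)

func main() {
	// New(connection, token) stores the session token; GET joins the path onto
	// the connection string via EndpointStr and sends X-Session-Id on the request.
	v1 := api.New("http://localhost:8080", "c558cccdd25917056e8b7b72a2a3e5f40215d707a6fac1aa")
	resp, err := v1.GET("api/v1/photos/%s", "pqnzigq351j2fqgn") // hypothetical endpoint path
	if err != nil {
		fmt.Println("GET failed:", err)
		return
	}
	defer resp.Body.Close()
	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(body))
}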
@@ -89,12 +89,6 @@ func (v1 *V1Client) GetPhoto(uuid string) (*Photo, error) {
		return nil, fmt.Errorf("unable to parse body: %v", err)
	}

-	// The API returns HTML so we have to hack this shit up
-	// TODO @kris-nova This is where we left off
-	// TODO It looks like the API is returning HTML SMDH...
-	//fmt.Println(string(bytes))
-	//return nil, nil
-
	err = json.Unmarshal(bytes, &photo)
	if err != nil {
		return nil, fmt.Errorf("unable to JSON unmarshal response body: %v", err)
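GetPhoto now unmarshals the response body directly instead of the commented-out HTML work-around. A standalone sketch of that decode step, with a hypothetical Photo shape (the real Photo type is defined elsewhere in the package and is not shown in this diff):

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical subset of the photo payload; the actual fields live in the
// package's Photo type, which this commit does not show.
type Photo struct {
	UID   string `json:"UID"`
	Title string `json:"Title"`
}

func main() {
	body := []byte(`{"UID":"pqnzigq351j2fqgn","Title":"Example"}`)
	var photo Photo
	if err := json.Unmarshal(body, &photo); err != nil {
		fmt.Println("unable to JSON unmarshal response body:", err)
		return
	}
	fmt.Printf("%+v\n", photo)
}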
client.go (32 changes)
@@ -4,6 +4,7 @@ import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"

@@ -19,18 +20,15 @@ const (
	DefaultLoopback         string = "127.0.0.1"
	DefaultPort             string = "8080"
	DefaultConnectionString string = "http://localhost:8080"
	DefaultTokenKey         string = "X-Session-Id"
)

// New is used to initialize a new *Client
-func New(auth ClientAuthenticator) *Client {
-
-	p := &Client{
-		v1client:      &api.V1Client{},
-		authenticator: auth,
+func New(connectionString string) *Client {
+	c := &Client{
+		contentType:      DefaultContentType,
+		connectionString: DefaultConnectionString,
	}
-	return p
+	return c
}

// Client represents a client to a Photoprism application

@@ -87,7 +85,8 @@ func (c *Client) V1() *api.V1Client {
}

// Login is used to attempt to authenticate with the Photoprism API
-func (c *Client) Login() error {
+func (c *Client) Auth(auth ClientAuthenticator) error {
+	c.authenticator = auth
	// @kris-nova We are returning V1 by default
	return c.LoginV1()
}

@@ -101,13 +100,22 @@ func (c *Client) LoginV1() error {
		return fmt.Errorf("JSON marshal error: %v", err)
	}
	buffer := bytes.NewBuffer(body)
-	resp, err := http.Post(c.Endpoint("v1/session"), c.contentType, buffer)
+	resp, err := http.Post(c.Endpoint("api/v1/session"), c.contentType, buffer)
	if err != nil {
		return fmt.Errorf("authentication error: %v", err)
	}
-	token := resp.Header.Get("X-Session-Id")
-	c.v1client.SetToken(token)
-	c.v1client.SetConnectionString(c.connectionString)
+	if resp.StatusCode != 200 {
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			return fmt.Errorf("unable to parse body for [%d] response: %v", resp.StatusCode, err)
+		}
+		return fmt.Errorf("login error [%d] %s", resp.StatusCode, body)
+	}
+	token := resp.Header.Get(DefaultTokenKey)
+	if token == "" {
+		return fmt.Errorf("missing auth token from successful login")
+	}
+	c.v1client = api.New(c.connectionString, token)
	return nil
}
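For comparison, a sketch of the raw HTTP round trip that LoginV1 performs, built only from the constants and endpoint in the hunks above; the JSON field names for the credentials are an assumption, since the ClientAuthenticator type is not part of this diff:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Assumed credential shape; the diff only shows that the authenticator is
	// JSON-marshaled before the POST.
	creds := map[string]string{"username": "admin", "password": "missy"}
	body, err := json.Marshal(creds)
	if err != nil {
		fmt.Println("JSON marshal error:", err)
		return
	}
	// Endpoint and content type taken from the diff: DefaultConnectionString,
	// DefaultContentType, and the "api/v1/session" path.
	resp, err := http.Post("http://localhost:8080/api/v1/session",
		"application/json; charset=utf-8", bytes.NewBuffer(body))
	if err != nil {
		fmt.Println("authentication error:", err)
		return
	}
	defer resp.Body.Close()
	// On success the session token comes back in the X-Session-Id header
	// (DefaultTokenKey), which LoginV1 then hands to api.New(...).
	fmt.Println("status:", resp.StatusCode, "X-Session-Id:", resp.Header.Get("X-Session-Id"))
}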
@@ -0,0 +1,10 @@
#!/bin/bash

token="c558cccdd25917056e8b7b72a2a3e5f40215d707a6fac1aa"
server="localhost"
port="8080"

function photoget() {
    url="http://${server}:${port}/${1}"
    curl --header "X-Session-Id: ${token}" --header "Content-Type: application/json" ${url}
}
@@ -0,0 +1,10 @@
#!/bin/bash

token=""
server="localhost"
port="8080"

function photoget() {
    url="http://${server}:${port}/${1}"
    curl --header "X-Session-Id: ${token}" --header "Content-Type: application/json" ${url} | jq
}
@@ -0,0 +1,17 @@
package main

import (
	photoprism "github.com/kris-nova/client-go"
	"github.com/kris-nova/logger"
)

func main() {
	logger.Level = 4
	creds := photoprism.NewClientAuthLogin("admin", "missy")
	client := photoprism.New("localhost:8080")
	err := client.Auth(creds)
	if err != nil {
		halt(4, "Error logging into API: %v", err)
	}
	logger.Always("Login Success!")
}
@@ -9,13 +9,15 @@ import (
)

func main() {
-	uuid := "pqnzigq156lndozm" // This is a known ID
-	client := photoprism.New(auth())
-	err := client.Login()
+	logger.Level = 4
+	uuid := "pqnzigq351j2fqgn" // This is a known ID
+	creds := photoprism.NewClientAuthLogin("admin", "missy")
+	client := photoprism.New("localhost:8080")
+	err := client.Auth(creds)
	if err != nil {
		halt(4, "Error logging into API: %v", err)
	}
-	logger.Always("Login Success!")
+	//logger.Always("Login Success!")
	photo, err := client.V1().GetPhoto(uuid)
	if err != nil {
		halt(3, "Error fetching photo: %v", err)
@@ -0,0 +1,16 @@
module github.com/kris-nova/client-go

go 1.15

require (
	github.com/disintegration/imaging v1.6.2
	github.com/fatih/color v1.10.0 // indirect
	github.com/gin-gonic/gin v1.6.3
	github.com/gorilla/websocket v1.4.2
	github.com/jinzhu/gorm v1.9.16
	github.com/kris-nova/logger v0.0.0-20181127235838-fd0d87064b06
	github.com/kris-nova/lolgopher v0.0.0-20210112022122-73f0047e8b65 // indirect
	github.com/paulmach/go.geojson v1.4.0
	github.com/photoprism/photoprism v0.0.0-20210208220634-4730b996c9d5
	gopkg.in/yaml.v2 v2.4.0
)
@@ -0,0 +1,457 @@
[go.sum: 457 auto-generated Go module checksum entries]
@ -1,4 +1,104 @@
|
|||
{
|
||||
"0da5bca9e57bbaa0d197ecac76a90a7b15e47a1ec93fec7c": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613501612939596202
|
||||
},
|
||||
"12b8b7ca5ebe52b071203520353652f21fda9f6e732966ba": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502549045115145
|
||||
},
|
||||
"136fe308aea8e85f6363ee4660d873eec9cb3095ef9329fb": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502823940492608
|
||||
},
|
||||
"2cf0991e18a63f088b01b452ea985dbe91612fe0e26f6d84": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613500923020420449
|
||||
},
|
||||
"3201e2a3c31f05c72057a8219ef18418d794dd68ebb6da5a": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613500727275894805
|
||||
},
|
||||
"3413f56c437b01d7761bfc8142cf154f7b8c9cf642303736": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613500755747387098
|
||||
},
|
||||
"43f0cfcca96a3671d42c3dad5a2feda055c03e95dc4b0ce3": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502911243093934
|
||||
},
|
||||
"589279988dbd4ad774ef7a59392f6bc44f416ff8d34e653e": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502738172098175
|
||||
},
|
||||
"5a159bc84d1cac8cd025db03de8e1dbf414d6bdefb967885": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502564441892628
|
||||
},
|
||||
"6fd28f3f6bff7f9542bd20cea0b358cc835cb655370aa961": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502943460686773
|
||||
},
|
||||
"910c4f6a3943e402e338511b235f43f8728d6f7e18b2c1b3": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613501561184983840
|
||||
},
|
||||
"914abb1b046e30bf2f2f91c646d8eab8f2fe74d1ba2bca4d": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502766374926483
|
||||
},
|
||||
"9185bf39909f5d94af059672446e325654f46225100ab6b1": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613500880573233477
|
||||
},
|
||||
"9de096a05ebad4b69de63ebd429069b7f95372c5c9f0952f": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502624809588007
|
||||
},
|
||||
"a55edeeb0ebaeba3f5b9bedf877dc02a71b79743337998ea": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502517447074204
|
||||
},
|
||||
"b12bcc8e03f51a00b17584596a213e41f3cddb5b4b14a29a": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502660685792657
|
||||
},
|
||||
"c558cccdd25917056e8b7b72a2a3e5f40215d707a6fac1aa": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613501068844430353
|
||||
},
|
||||
"c5e9dad483102f3a695c233271be12f3f89129ceaaf8ad21": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613501978437418029
|
||||
},
|
||||
"c8e57b785e553dc239054cc3dd566013d5b0e172749f6488": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502032233257787
|
||||
},
|
||||
"d86c17fdb63eb3ef0665cb91fe3b29e8d823dd04eed50bac": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
"expiration": 1613502706269686566
|
||||
},
|
||||
"d92837cb1c41e37b9993d25e282efb3b337b6ae609a687d9": {
|
||||
"user": "uqnzie01i1nypnt9",
|
||||
"tokens": null,
|
||||
|
|
Binary file not shown.
|
@ -6,7 +6,8 @@ OriginalName: EpTcef3VoAEiaS4
|
|||
Year: -1
|
||||
Month: -1
|
||||
Day: -1
|
||||
Quality: 2
|
||||
Details:
|
||||
Keywords: black, cat, eptcef, voaeias
|
||||
CreatedAt: 2021-02-04T03:17:14.849461459Z
|
||||
UpdatedAt: 2021-02-04T03:17:19.316067097Z
|
||||
UpdatedAt: 2021-02-09T18:40:53.525508824Z
|
||||
|
|
|
@ -1,12 +1,18 @@
|
|||
TakenAt: 2021-02-04T03:17:07Z
|
||||
UID: pqnzigq351j2fqgn
|
||||
Type: image
|
||||
Title: Unknown
|
||||
Title: Tambourine Bitches!
|
||||
TitleSrc: manual
|
||||
Description: Nova getting her ass smacked with a tambourine!
|
||||
DescriptionSrc: manual
|
||||
OriginalName: IMG_3044
|
||||
Year: -1
|
||||
Month: -1
|
||||
Day: -1
|
||||
Quality: 3
|
||||
Details:
|
||||
Keywords: green
|
||||
Keywords: green, mean, tambourine
|
||||
KeywordsSrc: manual
|
||||
CreatedAt: 2021-02-04T03:17:14.613092062Z
|
||||
UpdatedAt: 2021-02-04T03:17:19.229760864Z
|
||||
UpdatedAt: 2021-02-09T19:13:30.928324902Z
|
||||
EditedAt: 2021-02-09T19:13:31Z
|
||||
|
|
|
@ -7,8 +7,9 @@ OriginalName: ElgexEiU8AA-pQO
|
|||
Year: -1
|
||||
Month: -1
|
||||
Day: -1
|
||||
Quality: 3
|
||||
Details:
|
||||
Keywords: blue, elgexeiu, portrait
|
||||
CreatedAt: 2021-02-04T03:17:14.668332772Z
|
||||
UpdatedAt: 2021-02-04T04:27:16.228466834Z
|
||||
EditedAt: 2021-02-04T04:27:16Z
|
||||
UpdatedAt: 2021-02-09T18:40:53.626530938Z
|
||||
EditedAt: 2021-02-09T18:39:46Z
|
||||
|
|
|
@ -6,7 +6,8 @@ OriginalName: NVA05562
|
|||
Year: -1
|
||||
Month: -1
|
||||
Day: -1
|
||||
Quality: 2
|
||||
Details:
|
||||
Keywords: grey, seashore
|
||||
CreatedAt: 2021-02-04T03:17:14.738798274Z
|
||||
UpdatedAt: 2021-02-04T03:17:19.271986797Z
|
||||
UpdatedAt: 2021-02-09T18:40:53.807687859Z
|
||||
|
|
|
@ -48,21 +48,3 @@ func TestMain(m *testing.M) {
|
|||
// --- [ Tests ] ---
|
||||
|
||||
}
|
||||
|
||||
func TestHappyAPI(t *testing.T) {
|
||||
// Code to validate the API
|
||||
if true {
|
||||
t.Logf("Success!\n")
|
||||
} else {
|
||||
t.Errorf("Failure!\n")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSadAPI(t *testing.T) {
|
||||
// Code to validate the API
|
||||
if !false {
|
||||
t.Logf("Success!\n")
|
||||
} else {
|
||||
t.Errorf("Failure!\n")
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
photoprism "github.com/kris-nova/client-go"
|
||||
)
|
||||
|
||||
const (
|
||||
WellKnownUser = "admin"
|
||||
WellKnownPass = "missy"
|
||||
BadPassword = "charlie"
|
||||
)
|
||||
|
||||
// TestHappyLogin should succeed with the good password "missy"
|
||||
func TestHappyLogin(t *testing.T) {
|
||||
creds := photoprism.NewClientAuthLogin(WellKnownUser, WellKnownPass)
|
||||
client := photoprism.New("localhost:8080")
|
||||
err := client.Auth(creds)
|
||||
if err != nil {
|
||||
t.Errorf("invalid login: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// TestSadLogin should fail with the bad password "charlie"
|
||||
func TestSadLogin(t *testing.T) {
|
||||
creds := photoprism.NewClientAuthLogin(WellKnownUser, BadPassword)
|
||||
client := photoprism.New("localhost:8080")
|
||||
err := client.Auth(creds)
|
||||
if err == nil {
|
||||
t.Errorf("Missing error for known bad password")
|
||||
return
|
||||
}
|
||||
t.Logf("Successful bad password auth attempt: %v", err)
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
language: go
|
||||
|
||||
go:
|
||||
- 1.13.x
|
||||
|
||||
before_install:
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- go test -race -coverprofile=coverage.txt -covermode=atomic
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015-2017 Aaron Raddon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
|
@ -0,0 +1,294 @@
|
|||
Go Date Parser
|
||||
---------------------------
|
||||
|
||||
Parse many date strings without knowing the format in advance. Uses a scanner to read bytes and a state machine to find the format. Much faster than shotgun-based parse methods. See [bench_test.go](https://github.com/araddon/dateparse/blob/master/bench_test.go) for a performance comparison.
|
||||
|
||||
|
||||
[![Code Coverage](https://codecov.io/gh/araddon/dateparse/branch/master/graph/badge.svg)](https://codecov.io/gh/araddon/dateparse)
|
||||
[![GoDoc](https://godoc.org/github.com/araddon/dateparse?status.svg)](http://godoc.org/github.com/araddon/dateparse)
|
||||
[![Build Status](https://travis-ci.org/araddon/dateparse.svg?branch=master)](https://travis-ci.org/araddon/dateparse)
|
||||
[![Go ReportCard](https://goreportcard.com/badge/araddon/dateparse)](https://goreportcard.com/report/araddon/dateparse)
|
||||
|
||||
**MM/DD/YYYY VS DD/MM/YYYY** Right now this assumes mm/dd/yyyy when the date is ambiguous. If this is not the desired behavior, use `ParseStrict`, which fails on ambiguous date strings.
|
||||
|
||||
**Timezones** The location your server is configured with affects the results! See the example below, https://play.golang.org/p/IDHRalIyXh, and the last paragraph at https://golang.org/pkg/time/#Parse.
|
||||
|
||||
|
||||
```go
|
||||
|
||||
// Normal parse. Equivalent Timezone rules as time.Parse()
|
||||
t, err := dateparse.ParseAny("3/1/2014")
|
||||
|
||||
// Parse Strict, error on ambiguous mm/dd vs dd/mm dates
|
||||
t, err := dateparse.ParseStrict("3/1/2014")
|
||||
> returns error
|
||||
|
||||
// Return a string that represents the layout to parse the given date-time.
|
||||
layout, err := dateparse.ParseFormat("May 8, 2009 5:57:51 PM")
|
||||
> "Jan 2, 2006 3:04:05 PM"
|
||||
|
||||
```
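
To make the timezone note above concrete, here is a minimal sketch using `dateparse.ParseIn`, which interprets zone-less date strings in an explicit `*time.Location` instead of whatever `time.Local` happens to be on the server. The location name is only an illustration.

```go
package main

import (
	"fmt"
	"time"

	"github.com/araddon/dateparse"
)

func main() {
	// Load an explicit location instead of relying on the server's time.Local.
	denver, err := time.LoadLocation("America/Denver")
	if err != nil {
		panic(err)
	}

	// ParseIn interprets date strings that carry no zone information
	// in the given location.
	t, err := dateparse.ParseIn("3/1/2014 10:00", denver)
	if err != nil {
		panic(err)
	}
	fmt.Println(t) // the result carries the Denver offset rather than UTC
}
```

The same string parsed with `ParseAny` would be interpreted without a location (or in the server's local zone with `ParseLocal`), which is exactly the behavior the timezone note warns about.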
|
||||
|
||||
CLI tool for testing date formats
|
||||
----------------------------------
|
||||
|
||||
[Date Parse CLI](https://github.com/araddon/dateparse/blob/master/dateparse)
|
||||
|
||||
|
||||
Extended example
|
||||
-------------------
|
||||
|
||||
https://github.com/araddon/dateparse/blob/master/example/main.go
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/scylladb/termtables"
|
||||
"github.com/araddon/dateparse"
|
||||
)
|
||||
|
||||
var examples = []string{
|
||||
"May 8, 2009 5:57:51 PM",
|
||||
"oct 7, 1970",
|
||||
"oct 7, '70",
|
||||
"oct. 7, 1970",
|
||||
"oct. 7, 70",
|
||||
"Mon Jan 2 15:04:05 2006",
|
||||
"Mon Jan 2 15:04:05 MST 2006",
|
||||
"Mon Jan 02 15:04:05 -0700 2006",
|
||||
"Monday, 02-Jan-06 15:04:05 MST",
|
||||
"Mon, 02 Jan 2006 15:04:05 MST",
|
||||
"Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
|
||||
"Mon, 02 Jan 2006 15:04:05 -0700",
|
||||
"Thu, 4 Jan 2018 17:53:36 +0000",
|
||||
"Mon 30 Sep 2018 09:09:09 PM UTC",
|
||||
"Mon Aug 10 15:44:11 UTC+0100 2015",
|
||||
"Thu, 4 Jan 2018 17:53:36 +0000",
|
||||
"Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
|
||||
"September 17, 2012 10:09am",
|
||||
"September 17, 2012 at 10:09am PST-08",
|
||||
"September 17, 2012, 10:10:09",
|
||||
"October 7, 1970",
|
||||
"October 7th, 1970",
|
||||
"12 Feb 2006, 19:17",
|
||||
"12 Feb 2006 19:17",
|
||||
"7 oct 70",
|
||||
"7 oct 1970",
|
||||
"03 February 2013",
|
||||
"1 July 2013",
|
||||
"2013-Feb-03",
|
||||
// mm/dd/yy
|
||||
"3/31/2014",
|
||||
"03/31/2014",
|
||||
"08/21/71",
|
||||
"8/1/71",
|
||||
"4/8/2014 22:05",
|
||||
"04/08/2014 22:05",
|
||||
"4/8/14 22:05",
|
||||
"04/2/2014 03:00:51",
|
||||
"8/8/1965 12:00:00 AM",
|
||||
"8/8/1965 01:00:01 PM",
|
||||
"8/8/1965 01:00 PM",
|
||||
"8/8/1965 1:00 PM",
|
||||
"8/8/1965 12:00 AM",
|
||||
"4/02/2014 03:00:51",
|
||||
"03/19/2012 10:11:59",
|
||||
"03/19/2012 10:11:59.3186369",
|
||||
// yyyy/mm/dd
|
||||
"2014/3/31",
|
||||
"2014/03/31",
|
||||
"2014/4/8 22:05",
|
||||
"2014/04/08 22:05",
|
||||
"2014/04/2 03:00:51",
|
||||
"2014/4/02 03:00:51",
|
||||
"2012/03/19 10:11:59",
|
||||
"2012/03/19 10:11:59.3186369",
|
||||
// yyyy:mm:dd
|
||||
"2014:3:31",
|
||||
"2014:03:31",
|
||||
"2014:4:8 22:05",
|
||||
"2014:04:08 22:05",
|
||||
"2014:04:2 03:00:51",
|
||||
"2014:4:02 03:00:51",
|
||||
"2012:03:19 10:11:59",
|
||||
"2012:03:19 10:11:59.3186369",
|
||||
// Chinese
|
||||
"2014年04月08日",
|
||||
// yyyy-mm-ddThh
|
||||
"2006-01-02T15:04:05+0000",
|
||||
"2009-08-12T22:15:09-07:00",
|
||||
"2009-08-12T22:15:09",
|
||||
"2009-08-12T22:15:09Z",
|
||||
// yyyy-mm-dd hh:mm:ss
|
||||
"2014-04-26 17:24:37.3186369",
|
||||
"2012-08-03 18:31:59.257000000",
|
||||
"2014-04-26 17:24:37.123",
|
||||
"2013-04-01 22:43",
|
||||
"2013-04-01 22:43:22",
|
||||
"2014-12-16 06:20:00 UTC",
|
||||
"2014-12-16 06:20:00 GMT",
|
||||
"2014-04-26 05:24:37 PM",
|
||||
"2014-04-26 13:13:43 +0800",
|
||||
"2014-04-26 13:13:43 +0800 +08",
|
||||
"2014-04-26 13:13:44 +09:00",
|
||||
"2012-08-03 18:31:59.257000000 +0000 UTC",
|
||||
"2015-09-30 18:48:56.35272715 +0000 UTC",
|
||||
"2015-02-18 00:12:00 +0000 GMT",
|
||||
"2015-02-18 00:12:00 +0000 UTC",
|
||||
"2015-02-08 03:02:00 +0300 MSK m=+0.000000001",
|
||||
"2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001",
|
||||
"2017-07-19 03:21:51+00:00",
|
||||
"2014-04-26",
|
||||
"2014-04",
|
||||
"2014",
|
||||
"2014-05-11 08:20:13,787",
|
||||
// mm.dd.yy
|
||||
"3.31.2014",
|
||||
"03.31.2014",
|
||||
"08.21.71",
|
||||
"2014.03",
|
||||
"2014.03.30",
|
||||
// yyyymmdd and similar
|
||||
"20140601",
|
||||
"20140722105203",
|
||||
// unix seconds, ms, micro, nano
|
||||
"1332151919",
|
||||
"1384216367189",
|
||||
"1384216367111222",
|
||||
"1384216367111222333",
|
||||
}
|
||||
|
||||
var (
|
||||
timezone = ""
|
||||
)
|
||||
|
||||
func main() {
|
||||
flag.StringVar(&timezone, "timezone", "UTC", "Timezone aka `America/Los_Angeles` formatted time-zone")
|
||||
flag.Parse()
|
||||
|
||||
if timezone != "" {
|
||||
// NOTE: This is very, very important to understand
|
||||
// time-parsing in go
|
||||
loc, err := time.LoadLocation(timezone)
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
time.Local = loc
|
||||
}
|
||||
|
||||
table := termtables.CreateTable()
|
||||
|
||||
table.AddHeaders("Input", "Parsed, and Output as %v")
|
||||
for _, dateExample := range examples {
|
||||
t, err := dateparse.ParseLocal(dateExample)
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
table.AddRow(dateExample, fmt.Sprintf("%v", t))
|
||||
}
|
||||
fmt.Println(table.Render())
|
||||
}
|
||||
|
||||
/*
|
||||
+-------------------------------------------------------+-----------------------------------------+
|
||||
| Input | Parsed, and Output as %v |
|
||||
+-------------------------------------------------------+-----------------------------------------+
|
||||
| May 8, 2009 5:57:51 PM | 2009-05-08 17:57:51 +0000 UTC |
|
||||
| oct 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| oct 7, '70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| oct. 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| oct. 7, 70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| Mon Jan 2 15:04:05 2006 | 2006-01-02 15:04:05 +0000 UTC |
|
||||
| Mon Jan 2 15:04:05 MST 2006 | 2006-01-02 15:04:05 +0000 MST |
|
||||
| Mon Jan 02 15:04:05 -0700 2006 | 2006-01-02 15:04:05 -0700 -0700 |
|
||||
| Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
|
||||
| Mon, 02 Jan 2006 15:04:05 MST | 2006-01-02 15:04:05 +0000 MST |
|
||||
| Tue, 11 Jul 2017 16:28:13 +0200 (CEST) | 2017-07-11 16:28:13 +0200 +0200 |
|
||||
| Mon, 02 Jan 2006 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 |
|
||||
| Mon 30 Sep 2018 09:09:09 PM UTC | 2018-09-30 21:09:09 +0000 UTC |
|
||||
| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
|
||||
| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
|
||||
| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
|
||||
| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
|
||||
| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
|
||||
| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
|
||||
| October 7, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
||||
| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
||||
| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||
| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
|
||||
| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
|
||||
| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
|
||||
| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
|
||||
| 8/1/71 | 1971-08-01 00:00:00 +0000 UTC |
|
||||
| 4/8/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 04/08/2014 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 4/8/14 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 04/2/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 8/8/1965 12:00:00 AM | 1965-08-08 00:00:00 +0000 UTC |
|
||||
| 8/8/1965 01:00:01 PM | 1965-08-08 13:00:01 +0000 UTC |
|
||||
| 8/8/1965 01:00 PM | 1965-08-08 13:00:00 +0000 UTC |
|
||||
| 8/8/1965 1:00 PM | 1965-08-08 13:00:00 +0000 UTC |
|
||||
| 8/8/1965 12:00 AM | 1965-08-08 00:00:00 +0000 UTC |
|
||||
| 4/02/2014 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 03/19/2012 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 03/19/2012 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||
| 2014/3/31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 2014/03/31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 2014/4/8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 2014/04/08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||
| 2014/04/2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||
| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||
| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
|
||||
| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
|
||||
| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
|
||||
| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
|
||||
| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
|
||||
| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
|
||||
| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
|
||||
| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
|
||||
| 2013-04-01 22:43 | 2013-04-01 22:43:00 +0000 UTC |
|
||||
| 2013-04-01 22:43:22 | 2013-04-01 22:43:22 +0000 UTC |
|
||||
| 2014-12-16 06:20:00 UTC | 2014-12-16 06:20:00 +0000 UTC |
|
||||
| 2014-12-16 06:20:00 GMT | 2014-12-16 06:20:00 +0000 UTC |
|
||||
| 2014-04-26 05:24:37 PM | 2014-04-26 17:24:37 +0000 UTC |
|
||||
| 2014-04-26 13:13:43 +0800 | 2014-04-26 13:13:43 +0800 +0800 |
|
||||
| 2014-04-26 13:13:43 +0800 +08 | 2014-04-26 13:13:43 +0800 +0800 |
|
||||
| 2014-04-26 13:13:44 +09:00 | 2014-04-26 13:13:44 +0900 +0900 |
|
||||
| 2012-08-03 18:31:59.257000000 +0000 UTC | 2012-08-03 18:31:59.257 +0000 UTC |
|
||||
| 2015-09-30 18:48:56.35272715 +0000 UTC | 2015-09-30 18:48:56.35272715 +0000 UTC |
|
||||
| 2015-02-18 00:12:00 +0000 GMT | 2015-02-18 00:12:00 +0000 UTC |
|
||||
| 2015-02-18 00:12:00 +0000 UTC | 2015-02-18 00:12:00 +0000 UTC |
|
||||
| 2015-02-08 03:02:00 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00 +0300 +0300 |
|
||||
| 2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001 | 2015-02-08 03:02:00.001 +0300 +0300 |
|
||||
| 2017-07-19 03:21:51+00:00 | 2017-07-19 03:21:51 +0000 UTC |
|
||||
| 2014-04-26 | 2014-04-26 00:00:00 +0000 UTC |
|
||||
| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
|
||||
| 2014 | 2014-01-01 00:00:00 +0000 UTC |
|
||||
| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
|
||||
| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||
| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
|
||||
| 2014.03 | 2014-03-01 00:00:00 +0000 UTC |
|
||||
| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
|
||||
| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
|
||||
| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
|
||||
| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
|
||||
| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
|
||||
| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |
|
||||
| 1384216367111222333 | 2013-11-12 00:32:47.111222333 +0000 UTC |
|
||||
+-------------------------------------------------------+-----------------------------------------+
|
||||
*/
|
||||
|
||||
```
|
|
@ -0,0 +1,9 @@
|
|||
module github.com/araddon/dateparse
|
||||
|
||||
go 1.12
|
||||
|
||||
require (
|
||||
github.com/mattn/go-runewidth v0.0.9 // indirect
|
||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4
|
||||
github.com/stretchr/testify v1.6.1
|
||||
)
|
|
@ -0,0 +1,15 @@
|
|||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4 h1:8qmTC5ByIXO3GP/IzBkxcZ/99VITvnIETDhdFz/om7A=
|
||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
File diff suppressed because it is too large
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Brian Goff
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -0,0 +1,14 @@
|
|||
package md2man
|
||||
|
||||
import (
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
// Render converts a markdown document into a roff formatted document.
|
||||
func Render(doc []byte) []byte {
|
||||
renderer := NewRoffRenderer()
|
||||
|
||||
return blackfriday.Run(doc,
|
||||
[]blackfriday.Option{blackfriday.WithRenderer(renderer),
|
||||
blackfriday.WithExtensions(renderer.GetExtensions())}...)
|
||||
}
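
As a hedged illustration of how this `Render` entry point is typically used: the sketch below reads a markdown file and prints the generated roff. The `github.com/cpuguy83/go-md2man/v2/md2man` import path is an assumption; this diff only vendors the package source without showing its module path.

```go
package main

import (
	"fmt"
	"io/ioutil"

	// Assumed import path for the vendored md2man package shown above.
	"github.com/cpuguy83/go-md2man/v2/md2man"
)

func main() {
	// Read a markdown document and convert it to a roff man page.
	doc, err := ioutil.ReadFile("README.md")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(md2man.Render(doc)))
}
```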
|
|
@ -0,0 +1,345 @@
|
|||
package md2man
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
// roffRenderer implements the blackfriday.Renderer interface for creating
|
||||
// roff format (manpages) from markdown text
|
||||
type roffRenderer struct {
|
||||
extensions blackfriday.Extensions
|
||||
listCounters []int
|
||||
firstHeader bool
|
||||
defineTerm bool
|
||||
listDepth int
|
||||
}
|
||||
|
||||
const (
|
||||
titleHeader = ".TH "
|
||||
topLevelHeader = "\n\n.SH "
|
||||
secondLevelHdr = "\n.SH "
|
||||
otherHeader = "\n.SS "
|
||||
crTag = "\n"
|
||||
emphTag = "\\fI"
|
||||
emphCloseTag = "\\fP"
|
||||
strongTag = "\\fB"
|
||||
strongCloseTag = "\\fP"
|
||||
breakTag = "\n.br\n"
|
||||
paraTag = "\n.PP\n"
|
||||
hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n"
|
||||
linkTag = "\n\\[la]"
|
||||
linkCloseTag = "\\[ra]"
|
||||
codespanTag = "\\fB\\fC"
|
||||
codespanCloseTag = "\\fR"
|
||||
codeTag = "\n.PP\n.RS\n\n.nf\n"
|
||||
codeCloseTag = "\n.fi\n.RE\n"
|
||||
quoteTag = "\n.PP\n.RS\n"
|
||||
quoteCloseTag = "\n.RE\n"
|
||||
listTag = "\n.RS\n"
|
||||
listCloseTag = "\n.RE\n"
|
||||
arglistTag = "\n.TP\n"
|
||||
tableStart = "\n.TS\nallbox;\n"
|
||||
tableEnd = ".TE\n"
|
||||
tableCellStart = "T{\n"
|
||||
tableCellEnd = "\nT}\n"
|
||||
)
|
||||
|
||||
// NewRoffRenderer creates a new blackfriday Renderer for generating roff documents
|
||||
// from markdown
|
||||
func NewRoffRenderer() *roffRenderer { // nolint: golint
|
||||
var extensions blackfriday.Extensions
|
||||
|
||||
extensions |= blackfriday.NoIntraEmphasis
|
||||
extensions |= blackfriday.Tables
|
||||
extensions |= blackfriday.FencedCode
|
||||
extensions |= blackfriday.SpaceHeadings
|
||||
extensions |= blackfriday.Footnotes
|
||||
extensions |= blackfriday.Titleblock
|
||||
extensions |= blackfriday.DefinitionLists
|
||||
return &roffRenderer{
|
||||
extensions: extensions,
|
||||
}
|
||||
}
|
||||
|
||||
// GetExtensions returns the list of extensions used by this renderer implementation
|
||||
func (r *roffRenderer) GetExtensions() blackfriday.Extensions {
|
||||
return r.extensions
|
||||
}
|
||||
|
||||
// RenderHeader handles outputting the header at document start
|
||||
func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) {
|
||||
// disable hyphenation
|
||||
out(w, ".nh\n")
|
||||
}
|
||||
|
||||
// RenderFooter handles outputting the footer at the document end; the roff
|
||||
// renderer has no footer information
|
||||
func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {
|
||||
}
|
||||
|
||||
// RenderNode is called for each node in a markdown document; based on the node
|
||||
// type the equivalent roff output is sent to the writer
|
||||
func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||
|
||||
var walkAction = blackfriday.GoToNext
|
||||
|
||||
switch node.Type {
|
||||
case blackfriday.Text:
|
||||
r.handleText(w, node, entering)
|
||||
case blackfriday.Softbreak:
|
||||
out(w, crTag)
|
||||
case blackfriday.Hardbreak:
|
||||
out(w, breakTag)
|
||||
case blackfriday.Emph:
|
||||
if entering {
|
||||
out(w, emphTag)
|
||||
} else {
|
||||
out(w, emphCloseTag)
|
||||
}
|
||||
case blackfriday.Strong:
|
||||
if entering {
|
||||
out(w, strongTag)
|
||||
} else {
|
||||
out(w, strongCloseTag)
|
||||
}
|
||||
case blackfriday.Link:
|
||||
if !entering {
|
||||
out(w, linkTag+string(node.LinkData.Destination)+linkCloseTag)
|
||||
}
|
||||
case blackfriday.Image:
|
||||
// ignore images
|
||||
walkAction = blackfriday.SkipChildren
|
||||
case blackfriday.Code:
|
||||
out(w, codespanTag)
|
||||
escapeSpecialChars(w, node.Literal)
|
||||
out(w, codespanCloseTag)
|
||||
case blackfriday.Document:
|
||||
break
|
||||
case blackfriday.Paragraph:
|
||||
// roff .PP markers break lists
|
||||
if r.listDepth > 0 {
|
||||
return blackfriday.GoToNext
|
||||
}
|
||||
if entering {
|
||||
out(w, paraTag)
|
||||
} else {
|
||||
out(w, crTag)
|
||||
}
|
||||
case blackfriday.BlockQuote:
|
||||
if entering {
|
||||
out(w, quoteTag)
|
||||
} else {
|
||||
out(w, quoteCloseTag)
|
||||
}
|
||||
case blackfriday.Heading:
|
||||
r.handleHeading(w, node, entering)
|
||||
case blackfriday.HorizontalRule:
|
||||
out(w, hruleTag)
|
||||
case blackfriday.List:
|
||||
r.handleList(w, node, entering)
|
||||
case blackfriday.Item:
|
||||
r.handleItem(w, node, entering)
|
||||
case blackfriday.CodeBlock:
|
||||
out(w, codeTag)
|
||||
escapeSpecialChars(w, node.Literal)
|
||||
out(w, codeCloseTag)
|
||||
case blackfriday.Table:
|
||||
r.handleTable(w, node, entering)
|
||||
case blackfriday.TableCell:
|
||||
r.handleTableCell(w, node, entering)
|
||||
case blackfriday.TableHead:
|
||||
case blackfriday.TableBody:
|
||||
case blackfriday.TableRow:
|
||||
// no action as cell entries do all the nroff formatting
|
||||
return blackfriday.GoToNext
|
||||
default:
|
||||
fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
|
||||
}
|
||||
return walkAction
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleText(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
var (
|
||||
start, end string
|
||||
)
|
||||
// handle special roff table cell text encapsulation
|
||||
if node.Parent.Type == blackfriday.TableCell {
|
||||
if len(node.Literal) > 30 {
|
||||
start = tableCellStart
|
||||
end = tableCellEnd
|
||||
} else {
|
||||
// end rows that aren't terminated by "tableCellEnd" with a cr if end of row
|
||||
if node.Parent.Next == nil && !node.Parent.IsHeader {
|
||||
end = crTag
|
||||
}
|
||||
}
|
||||
}
|
||||
out(w, start)
|
||||
escapeSpecialChars(w, node.Literal)
|
||||
out(w, end)
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
switch node.Level {
|
||||
case 1:
|
||||
if !r.firstHeader {
|
||||
out(w, titleHeader)
|
||||
r.firstHeader = true
|
||||
break
|
||||
}
|
||||
out(w, topLevelHeader)
|
||||
case 2:
|
||||
out(w, secondLevelHdr)
|
||||
default:
|
||||
out(w, otherHeader)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
openTag := listTag
|
||||
closeTag := listCloseTag
|
||||
if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
|
||||
// tags for definition lists handled within Item node
|
||||
openTag = ""
|
||||
closeTag = ""
|
||||
}
|
||||
if entering {
|
||||
r.listDepth++
|
||||
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||
r.listCounters = append(r.listCounters, 1)
|
||||
}
|
||||
out(w, openTag)
|
||||
} else {
|
||||
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||
r.listCounters = r.listCounters[:len(r.listCounters)-1]
|
||||
}
|
||||
out(w, closeTag)
|
||||
r.listDepth--
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||
out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[len(r.listCounters)-1]))
|
||||
r.listCounters[len(r.listCounters)-1]++
|
||||
} else if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
|
||||
// state machine for handling terms and following definitions
|
||||
// since blackfriday does not distinguish them properly, nor
|
||||
// does it separate them into separate lists as it should
|
||||
if !r.defineTerm {
|
||||
out(w, arglistTag)
|
||||
r.defineTerm = true
|
||||
} else {
|
||||
r.defineTerm = false
|
||||
}
|
||||
} else {
|
||||
out(w, ".IP \\(bu 2\n")
|
||||
}
|
||||
} else {
|
||||
out(w, "\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
if entering {
|
||||
out(w, tableStart)
|
||||
//call walker to count cells (and rows?) so format section can be produced
|
||||
columns := countColumns(node)
|
||||
out(w, strings.Repeat("l ", columns)+"\n")
|
||||
out(w, strings.Repeat("l ", columns)+".\n")
|
||||
} else {
|
||||
out(w, tableEnd)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||
var (
|
||||
start, end string
|
||||
)
|
||||
if node.IsHeader {
|
||||
start = codespanTag
|
||||
end = codespanCloseTag
|
||||
}
|
||||
if entering {
|
||||
if node.Prev != nil && node.Prev.Type == blackfriday.TableCell {
|
||||
out(w, "\t"+start)
|
||||
} else {
|
||||
out(w, start)
|
||||
}
|
||||
} else {
|
||||
// need to carriage return if we are at the end of the header row
|
||||
if node.IsHeader && node.Next == nil {
|
||||
end = end + crTag
|
||||
}
|
||||
out(w, end)
|
||||
}
|
||||
}
|
||||
|
||||
// because roff format requires knowing the column count before outputting any table
|
||||
// data we need to walk a table tree and count the columns
|
||||
func countColumns(node *blackfriday.Node) int {
|
||||
var columns int
|
||||
|
||||
node.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||
switch node.Type {
|
||||
case blackfriday.TableRow:
|
||||
if !entering {
|
||||
return blackfriday.Terminate
|
||||
}
|
||||
case blackfriday.TableCell:
|
||||
if entering {
|
||||
columns++
|
||||
}
|
||||
default:
|
||||
}
|
||||
return blackfriday.GoToNext
|
||||
})
|
||||
return columns
|
||||
}
|
||||
|
||||
func out(w io.Writer, output string) {
|
||||
io.WriteString(w, output) // nolint: errcheck
|
||||
}
|
||||
|
||||
func needsBackslash(c byte) bool {
|
||||
for _, r := range []byte("-_&\\~") {
|
||||
if c == r {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func escapeSpecialChars(w io.Writer, text []byte) {
|
||||
for i := 0; i < len(text); i++ {
|
||||
// escape initial apostrophe or period
|
||||
if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') {
|
||||
out(w, "\\&")
|
||||
}
|
||||
|
||||
// directly copy normal characters
|
||||
org := i
|
||||
|
||||
for i < len(text) && !needsBackslash(text[i]) {
|
||||
i++
|
||||
}
|
||||
if i > org {
|
||||
w.Write(text[org:i]) // nolint: errcheck
|
||||
}
|
||||
|
||||
// escape a character
|
||||
if i >= len(text) {
|
||||
break
|
||||
}
|
||||
|
||||
w.Write([]byte{'\\', text[i]}) // nolint: errcheck
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
language: go
|
||||
go:
|
||||
- "1.10.x"
|
||||
- "1.11.x"
|
||||
- "1.12.x"
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
|
||||
script:
|
||||
- go test -v -race -cover
|
||||
- $GOPATH/bin/goveralls -service=travis-ci
|
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2012 Grigory Dryapak
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -0,0 +1,226 @@
|
|||
# Imaging
|
||||
|
||||
[![GoDoc](https://godoc.org/github.com/disintegration/imaging?status.svg)](https://godoc.org/github.com/disintegration/imaging)
|
||||
[![Build Status](https://travis-ci.org/disintegration/imaging.svg?branch=master)](https://travis-ci.org/disintegration/imaging)
|
||||
[![Coverage Status](https://coveralls.io/repos/github/disintegration/imaging/badge.svg?branch=master&service=github)](https://coveralls.io/github/disintegration/imaging?branch=master)
|
||||
[![Go Report Card](https://goreportcard.com/badge/github.com/disintegration/imaging)](https://goreportcard.com/report/github.com/disintegration/imaging)
|
||||
|
||||
Package imaging provides basic image processing functions (resize, rotate, crop, brightness/contrast adjustments, etc.).
|
||||
|
||||
All the image processing functions provided by the package accept any image type that implements `image.Image` interface
|
||||
as an input, and return a new image of `*image.NRGBA` type (32bit RGBA colors, non-premultiplied alpha).
|
||||
|
||||
## Installation
|
||||
|
||||
go get -u github.com/disintegration/imaging
|
||||
|
||||
## Documentation
|
||||
|
||||
http://godoc.org/github.com/disintegration/imaging
|
||||
|
||||
## Usage examples
|
||||
|
||||
A few usage examples can be found below. See the documentation for the full list of supported functions.
|
||||
|
||||
### Image resizing
|
||||
|
||||
```go
|
||||
// Resize srcImage to size = 128x128px using the Lanczos filter.
|
||||
dstImage128 := imaging.Resize(srcImage, 128, 128, imaging.Lanczos)
|
||||
|
||||
// Resize srcImage to width = 800px preserving the aspect ratio.
|
||||
dstImage800 := imaging.Resize(srcImage, 800, 0, imaging.Lanczos)
|
||||
|
||||
// Scale down srcImage to fit the 800x600px bounding box.
|
||||
dstImageFit := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)
|
||||
|
||||
// Resize and crop the srcImage to fill the 100x100px area.
|
||||
dstImageFill := imaging.Fill(srcImage, 100, 100, imaging.Center, imaging.Lanczos)
|
||||
```
|
||||
|
||||
Imaging supports image resizing using various resampling filters. The most notable ones:
|
||||
- `Lanczos` - A high-quality resampling filter for photographic images yielding sharp results.
|
||||
- `CatmullRom` - A sharp cubic filter that is faster than Lanczos filter while providing similar results.
|
||||
- `MitchellNetravali` - A cubic filter that produces smoother results with less ringing artifacts than CatmullRom.
|
||||
- `Linear` - Bilinear resampling filter, produces smooth output. Faster than cubic filters.
|
||||
- `Box` - Simple and fast averaging filter appropriate for downscaling. When upscaling it's similar to NearestNeighbor.
|
||||
- `NearestNeighbor` - Fastest resampling filter, no antialiasing.
|
||||
|
||||
The full list of supported filters: NearestNeighbor, Box, Linear, Hermite, MitchellNetravali, CatmullRom, BSpline, Gaussian, Lanczos, Hann, Hamming, Blackman, Bartlett, Welch, Cosine. Custom filters can be created using the `ResampleFilter` struct, as sketched below.
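
A minimal sketch of a custom filter, following the snippet style above (`srcImage` and the `math` import are assumed) and assuming `ResampleFilter` exposes `Support` and `Kernel` fields as in the upstream package; the triangle kernel below simply reproduces linear resampling and is only an illustration.

```go
// triangle is a custom linear (triangle) resampling filter.
var triangle = imaging.ResampleFilter{
	Support: 1.0,
	Kernel: func(x float64) float64 {
		x = math.Abs(x)
		if x < 1.0 {
			return 1.0 - x
		}
		return 0.0
	},
}

// Use it like any built-in filter.
dstImage := imaging.Resize(srcImage, 800, 0, triangle)
```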
|
||||
|
||||
**Resampling filters comparison**
|
||||
|
||||
Original image:
|
||||
|
||||
![srcImage](testdata/branches.png)
|
||||
|
||||
The same image resized from 600x400px to 150x100px using different resampling filters.
|
||||
From faster (lower quality) to slower (higher quality):
|
||||
|
||||
Filter | Resize result
|
||||
--------------------------|---------------------------------------------
|
||||
`imaging.NearestNeighbor` | ![dstImage](testdata/out_resize_nearest.png)
|
||||
`imaging.Linear` | ![dstImage](testdata/out_resize_linear.png)
|
||||
`imaging.CatmullRom` | ![dstImage](testdata/out_resize_catrom.png)
|
||||
`imaging.Lanczos` | ![dstImage](testdata/out_resize_lanczos.png)
|
||||
|
||||
|
||||
### Gaussian Blur
|
||||
|
||||
```go
|
||||
dstImage := imaging.Blur(srcImage, 0.5)
|
||||
```
|
||||
|
||||
The sigma parameter controls the strength of the blurring effect.
|
||||
|
||||
Original image | Sigma = 0.5 | Sigma = 1.5
|
||||
-----------------------------------|----------------------------------------|---------------------------------------
|
||||
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_blur_0.5.png) | ![dstImage](testdata/out_blur_1.5.png)
|
||||
|
||||
### Sharpening
|
||||
|
||||
```go
|
||||
dstImage := imaging.Sharpen(srcImage, 0.5)
|
||||
```
|
||||
|
||||
`Sharpen` uses a Gaussian function internally. The sigma parameter controls the strength of the sharpening effect.
|
||||
|
||||
Original image | Sigma = 0.5 | Sigma = 1.5
|
||||
-----------------------------------|-------------------------------------------|------------------------------------------
|
||||
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_sharpen_0.5.png) | ![dstImage](testdata/out_sharpen_1.5.png)
|
||||
|
||||
### Gamma correction
|
||||
|
||||
```go
|
||||
dstImage := imaging.AdjustGamma(srcImage, 0.75)
|
||||
```
|
||||
|
||||
Original image | Gamma = 0.75 | Gamma = 1.25
|
||||
-----------------------------------|------------------------------------------|-----------------------------------------
|
||||
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_gamma_0.75.png) | ![dstImage](testdata/out_gamma_1.25.png)
|
||||
|
||||
### Contrast adjustment
|
||||
|
||||
```go
|
||||
dstImage := imaging.AdjustContrast(srcImage, 20)
|
||||
```
|
||||
|
||||
Original image | Contrast = 15 | Contrast = -15
|
||||
-----------------------------------|--------------------------------------------|-------------------------------------------
|
||||
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_contrast_p15.png) | ![dstImage](testdata/out_contrast_m15.png)
|
||||
|
||||
### Brightness adjustment
|
||||
|
||||
```go
|
||||
dstImage := imaging.AdjustBrightness(srcImage, 20)
|
||||
```
|
||||
|
||||
Original image | Brightness = 10 | Brightness = -10
|
||||
-----------------------------------|----------------------------------------------|---------------------------------------------
|
||||
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_brightness_p10.png) | ![dstImage](testdata/out_brightness_m10.png)
|
||||
|
||||
### Saturation adjustment
|
||||
|
||||
```go
|
||||
dstImage := imaging.AdjustSaturation(srcImage, 20)
|
||||
```
|
||||
|
||||
Original image | Saturation = 30 | Saturation = -30
|
||||
-----------------------------------|----------------------------------------------|---------------------------------------------
|
||||
![srcImage](testdata/flowers_small.png) | ![dstImage](testdata/out_saturation_p30.png) | ![dstImage](testdata/out_saturation_m30.png)
|
||||
|
||||
## FAQ
|
||||
|
||||
### Incorrect image orientation after processing (e.g. an image appears rotated after resizing)
|
||||
|
||||
Most probably, the given image contains the EXIF orientation tag.
|
||||
The standard `image/*` packages do not support loading and saving
|
||||
this kind of information. To fix the issue, try opening images with
|
||||
the `AutoOrientation` decode option. If this option is set to `true`,
|
||||
the image orientation is changed after decoding, according to the
|
||||
orientation tag (if present). Here's an example:
|
||||
|
||||
```go
|
||||
img, err := imaging.Open("test.jpg", imaging.AutoOrientation(true))
|
||||
```
|
||||
|
||||
### What's the difference between `imaging` and `gift` packages?
|
||||
|
||||
[imaging](https://github.com/disintegration/imaging)
|
||||
is designed to be a lightweight and simple image manipulation package.
|
||||
It provides basic image processing functions and a few helper functions
|
||||
such as `Open` and `Save`. It consistently returns the `*image.NRGBA` image
|
||||
type (8 bits per channel, RGBA).
|
||||
|
||||
[gift](https://github.com/disintegration/gift)
|
||||
supports more advanced image processing, for example, sRGB/Linear color
|
||||
space conversions. It also supports different output image types
|
||||
(e.g. 16 bits per channel) and provides an easy-to-use API for chaining
|
||||
multiple processing steps together.
|
||||
|
||||
## Example code
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
|
||||
"github.com/disintegration/imaging"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Open a test image.
|
||||
src, err := imaging.Open("testdata/flowers.png")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to open image: %v", err)
|
||||
}
|
||||
|
||||
// Crop the original image to 300x300px size using the center anchor.
|
||||
src = imaging.CropAnchor(src, 300, 300, imaging.Center)
|
||||
|
||||
// Resize the cropped image to width = 200px preserving the aspect ratio.
|
||||
src = imaging.Resize(src, 200, 0, imaging.Lanczos)
|
||||
|
||||
// Create a blurred version of the image.
|
||||
img1 := imaging.Blur(src, 5)
|
||||
|
||||
// Create a grayscale version of the image with higher contrast and sharpness.
|
||||
img2 := imaging.Grayscale(src)
|
||||
img2 = imaging.AdjustContrast(img2, 20)
|
||||
img2 = imaging.Sharpen(img2, 2)
|
||||
|
||||
// Create an inverted version of the image.
|
||||
img3 := imaging.Invert(src)
|
||||
|
||||
// Create an embossed version of the image using a convolution filter.
|
||||
img4 := imaging.Convolve3x3(
|
||||
src,
|
||||
[9]float64{
|
||||
-1, -1, 0,
|
||||
-1, 1, 1,
|
||||
0, 1, 1,
|
||||
},
|
||||
nil,
|
||||
)
|
||||
|
||||
// Create a new image and paste the four produced images into it.
|
||||
dst := imaging.New(400, 400, color.NRGBA{0, 0, 0, 0})
|
||||
dst = imaging.Paste(dst, img1, image.Pt(0, 0))
|
||||
dst = imaging.Paste(dst, img2, image.Pt(0, 200))
|
||||
dst = imaging.Paste(dst, img3, image.Pt(200, 0))
|
||||
dst = imaging.Paste(dst, img4, image.Pt(200, 200))
|
||||
|
||||
// Save the resulting image as JPEG.
|
||||
err = imaging.Save(dst, "testdata/out_example.jpg")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to save image: %v", err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Output:
|
||||
|
||||
![dstImage](testdata/out_example.jpg)
|
|
@ -0,0 +1,253 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
"math"
|
||||
)
|
||||
|
||||
// Grayscale produces a grayscale version of the image.
|
||||
func Grayscale(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
r := d[0]
|
||||
g := d[1]
|
||||
b := d[2]
|
||||
f := 0.299*float64(r) + 0.587*float64(g) + 0.114*float64(b)
|
||||
y := uint8(f + 0.5)
|
||||
d[0] = y
|
||||
d[1] = y
|
||||
d[2] = y
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Invert produces an inverted (negated) version of the image.
|
||||
func Invert(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
d[0] = 255 - d[0]
|
||||
d[1] = 255 - d[1]
|
||||
d[2] = 255 - d[2]
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// AdjustSaturation changes the saturation of the image using the percentage parameter and returns the adjusted image.
|
||||
// The percentage must be in the range (-100, 100).
|
||||
// The percentage = 0 gives the original image.
|
||||
// The percentage = 100 gives the image with the saturation value doubled for each pixel.
|
||||
// The percentage = -100 gives the image with the saturation value zeroed for each pixel (grayscale).
|
||||
//
|
||||
// Examples:
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, 25) // Increase image saturation by 25%.
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, -10) // Decrease image saturation by 10%.
|
||||
//
|
||||
func AdjustSaturation(img image.Image, percentage float64) *image.NRGBA {
|
||||
percentage = math.Min(math.Max(percentage, -100), 100)
|
||||
multiplier := 1 + percentage/100
|
||||
|
||||
return AdjustFunc(img, func(c color.NRGBA) color.NRGBA {
|
||||
h, s, l := rgbToHSL(c.R, c.G, c.B)
|
||||
s *= multiplier
|
||||
if s > 1 {
|
||||
s = 1
|
||||
}
|
||||
r, g, b := hslToRGB(h, s, l)
|
||||
return color.NRGBA{r, g, b, c.A}
|
||||
})
|
||||
}
|
||||
|
||||
// AdjustContrast changes the contrast of the image using the percentage parameter and returns the adjusted image.
|
||||
// The percentage must be in range (-100, 100). The percentage = 0 gives the original image.
|
||||
// The percentage = -100 gives a solid gray image.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// dstImage = imaging.AdjustContrast(srcImage, -10) // Decrease image contrast by 10%.
|
||||
// dstImage = imaging.AdjustContrast(srcImage, 20) // Increase image contrast by 20%.
|
||||
//
|
||||
func AdjustContrast(img image.Image, percentage float64) *image.NRGBA {
|
||||
percentage = math.Min(math.Max(percentage, -100.0), 100.0)
|
||||
lut := make([]uint8, 256)
|
||||
|
||||
v := (100.0 + percentage) / 100.0
|
||||
for i := 0; i < 256; i++ {
|
||||
switch {
|
||||
case 0 <= v && v <= 1:
|
||||
lut[i] = clamp((0.5 + (float64(i)/255.0-0.5)*v) * 255.0)
|
||||
case 1 < v && v < 2:
|
||||
lut[i] = clamp((0.5 + (float64(i)/255.0-0.5)*(1/(2.0-v))) * 255.0)
|
||||
default:
|
||||
lut[i] = uint8(float64(i)/255.0+0.5) * 255
|
||||
}
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
// AdjustBrightness changes the brightness of the image using the percentage parameter and returns the adjusted image.
|
||||
// The percentage must be in range (-100, 100). The percentage = 0 gives the original image.
|
||||
// The percentage = -100 gives a solid black image. The percentage = 100 gives a solid white image.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// dstImage = imaging.AdjustBrightness(srcImage, -15) // Decrease image brightness by 15%.
|
||||
// dstImage = imaging.AdjustBrightness(srcImage, 10) // Increase image brightness by 10%.
|
||||
//
|
||||
func AdjustBrightness(img image.Image, percentage float64) *image.NRGBA {
|
||||
percentage = math.Min(math.Max(percentage, -100.0), 100.0)
|
||||
lut := make([]uint8, 256)
|
||||
|
||||
shift := 255.0 * percentage / 100.0
|
||||
for i := 0; i < 256; i++ {
|
||||
lut[i] = clamp(float64(i) + shift)
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
// AdjustGamma performs a gamma correction on the image and returns the adjusted image.
|
||||
// Gamma parameter must be positive. Gamma = 1.0 gives the original image.
|
||||
// Gamma less than 1.0 darkens the image and gamma greater than 1.0 lightens it.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage = imaging.AdjustGamma(srcImage, 0.7)
|
||||
//
|
||||
func AdjustGamma(img image.Image, gamma float64) *image.NRGBA {
|
||||
e := 1.0 / math.Max(gamma, 0.0001)
|
||||
lut := make([]uint8, 256)
|
||||
|
||||
for i := 0; i < 256; i++ {
|
||||
lut[i] = clamp(math.Pow(float64(i)/255.0, e) * 255.0)
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
// AdjustSigmoid changes the contrast of the image using a sigmoidal function and returns the adjusted image.
|
||||
// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail.
|
||||
// The midpoint parameter is the midpoint of contrast that must be between 0 and 1, typically 0.5.
|
||||
// The factor parameter indicates how much to increase or decrease the contrast, typically in range (-10, 10).
|
||||
// If the factor parameter is positive the image contrast is increased otherwise the contrast is decreased.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// dstImage = imaging.AdjustSigmoid(srcImage, 0.5, 3.0) // Increase the contrast.
|
||||
// dstImage = imaging.AdjustSigmoid(srcImage, 0.5, -3.0) // Decrease the contrast.
|
||||
//
|
||||
func AdjustSigmoid(img image.Image, midpoint, factor float64) *image.NRGBA {
|
||||
if factor == 0 {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
lut := make([]uint8, 256)
|
||||
a := math.Min(math.Max(midpoint, 0.0), 1.0)
|
||||
b := math.Abs(factor)
|
||||
sig0 := sigmoid(a, b, 0)
|
||||
sig1 := sigmoid(a, b, 1)
|
||||
e := 1.0e-6
|
||||
|
||||
if factor > 0 {
|
||||
for i := 0; i < 256; i++ {
|
||||
x := float64(i) / 255.0
|
||||
sigX := sigmoid(a, b, x)
|
||||
f := (sigX - sig0) / (sig1 - sig0)
|
||||
lut[i] = clamp(f * 255.0)
|
||||
}
|
||||
} else {
|
||||
for i := 0; i < 256; i++ {
|
||||
x := float64(i) / 255.0
|
||||
arg := math.Min(math.Max((sig1-sig0)*x+sig0, e), 1.0-e)
|
||||
f := a - math.Log(1.0/arg-1.0)/b
|
||||
lut[i] = clamp(f * 255.0)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustLUT(img, lut)
|
||||
}
|
||||
|
||||
func sigmoid(a, b, x float64) float64 {
|
||||
return 1 / (1 + math.Exp(b*(a-x)))
|
||||
}
|
||||
|
||||
// adjustLUT applies the given lookup table to the colors of the image.
|
||||
func adjustLUT(img image.Image, lut []uint8) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
lut = lut[0:256]
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
d[0] = lut[d[0]]
|
||||
d[1] = lut[d[1]]
|
||||
d[2] = lut[d[2]]
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// AdjustFunc applies the fn function to each pixel of the img image and returns the adjusted image.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage = imaging.AdjustFunc(
|
||||
// srcImage,
|
||||
// func(c color.NRGBA) color.NRGBA {
|
||||
// // Shift the red channel by 16.
|
||||
// r := int(c.R) + 16
|
||||
// if r > 255 {
|
||||
// r = 255
|
||||
// }
|
||||
// return color.NRGBA{uint8(r), c.G, c.B, c.A}
|
||||
// }
|
||||
// )
|
||||
//
|
||||
func AdjustFunc(img image.Image, fn func(c color.NRGBA) color.NRGBA) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+4 : i+4]
|
||||
r := d[0]
|
||||
g := d[1]
|
||||
b := d[2]
|
||||
a := d[3]
|
||||
c := fn(color.NRGBA{r, g, b, a})
|
||||
d[0] = c.R
|
||||
d[1] = c.G
|
||||
d[2] = c.B
|
||||
d[3] = c.A
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
|
@ -0,0 +1,148 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
)
|
||||
|
||||
// ConvolveOptions are convolution parameters.
|
||||
type ConvolveOptions struct {
|
||||
// If Normalize is true the kernel is normalized before convolution.
|
||||
Normalize bool
|
||||
|
||||
// If Abs is true the absolute value of each color channel is taken after convolution.
|
||||
Abs bool
|
||||
|
||||
// Bias is added to each color channel value after convolution.
|
||||
Bias int
|
||||
}
|
||||
|
||||
// Convolve3x3 convolves the image with the specified 3x3 convolution kernel.
|
||||
// Default parameters are used if a nil *ConvolveOptions is passed.
|
||||
func Convolve3x3(img image.Image, kernel [9]float64, options *ConvolveOptions) *image.NRGBA {
|
||||
return convolve(img, kernel[:], options)
|
||||
}
|
||||
|
||||
// Convolve5x5 convolves the image with the specified 5x5 convolution kernel.
|
||||
// Default parameters are used if a nil *ConvolveOptions is passed.
|
||||
func Convolve5x5(img image.Image, kernel [25]float64, options *ConvolveOptions) *image.NRGBA {
|
||||
return convolve(img, kernel[:], options)
|
||||
}
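// Example (added sketch; the kernel values and the srcImage/dstImage names are
// illustrative assumptions): a 3x3 sharpening convolution. The kernel sums to 1,
// so no normalization is needed.
//
// dstImage := imaging.Convolve3x3(
//     srcImage,
//     [9]float64{-1, -1, -1, -1, 9, -1, -1, -1, -1},
//     nil, // nil selects the default ConvolveOptions (no Normalize, no Abs, Bias 0)
// )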
|
||||
|
||||
func convolve(img image.Image, kernel []float64, options *ConvolveOptions) *image.NRGBA {
|
||||
src := toNRGBA(img)
|
||||
w := src.Bounds().Max.X
|
||||
h := src.Bounds().Max.Y
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, w, h))
|
||||
|
||||
if w < 1 || h < 1 {
|
||||
return dst
|
||||
}
|
||||
|
||||
if options == nil {
|
||||
options = &ConvolveOptions{}
|
||||
}
|
||||
|
||||
if options.Normalize {
|
||||
normalizeKernel(kernel)
|
||||
}
|
||||
|
||||
type coef struct {
|
||||
x, y int
|
||||
k float64
|
||||
}
|
||||
var coefs []coef
|
||||
var m int
|
||||
|
||||
switch len(kernel) {
|
||||
case 9:
|
||||
m = 1
|
||||
case 25:
|
||||
m = 2
|
||||
}
|
||||
|
||||
i := 0
|
||||
for y := -m; y <= m; y++ {
|
||||
for x := -m; x <= m; x++ {
|
||||
if kernel[i] != 0 {
|
||||
coefs = append(coefs, coef{x: x, y: y, k: kernel[i]})
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
parallel(0, h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
for x := 0; x < w; x++ {
|
||||
var r, g, b float64
|
||||
for _, c := range coefs {
|
||||
ix := x + c.x
|
||||
if ix < 0 {
|
||||
ix = 0
|
||||
} else if ix >= w {
|
||||
ix = w - 1
|
||||
}
|
||||
|
||||
iy := y + c.y
|
||||
if iy < 0 {
|
||||
iy = 0
|
||||
} else if iy >= h {
|
||||
iy = h - 1
|
||||
}
|
||||
|
||||
off := iy*src.Stride + ix*4
|
||||
s := src.Pix[off : off+3 : off+3]
|
||||
r += float64(s[0]) * c.k
|
||||
g += float64(s[1]) * c.k
|
||||
b += float64(s[2]) * c.k
|
||||
}
|
||||
|
||||
if options.Abs {
|
||||
if r < 0 {
|
||||
r = -r
|
||||
}
|
||||
if g < 0 {
|
||||
g = -g
|
||||
}
|
||||
if b < 0 {
|
||||
b = -b
|
||||
}
|
||||
}
|
||||
|
||||
if options.Bias != 0 {
|
||||
r += float64(options.Bias)
|
||||
g += float64(options.Bias)
|
||||
b += float64(options.Bias)
|
||||
}
|
||||
|
||||
srcOff := y*src.Stride + x*4
|
||||
dstOff := y*dst.Stride + x*4
|
||||
d := dst.Pix[dstOff : dstOff+4 : dstOff+4]
|
||||
d[0] = clamp(r)
|
||||
d[1] = clamp(g)
|
||||
d[2] = clamp(b)
|
||||
d[3] = src.Pix[srcOff+3]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
func normalizeKernel(kernel []float64) {
|
||||
var sum, sumpos float64
|
||||
for i := range kernel {
|
||||
sum += kernel[i]
|
||||
if kernel[i] > 0 {
|
||||
sumpos += kernel[i]
|
||||
}
|
||||
}
|
||||
if sum != 0 {
|
||||
for i := range kernel {
|
||||
kernel[i] /= sum
|
||||
}
|
||||
} else if sumpos != 0 {
|
||||
for i := range kernel {
|
||||
kernel[i] /= sumpos
|
||||
}
|
||||
}
|
||||
}
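// For instance, the 3x3 blur kernel {1, 2, 1, 2, 4, 2, 1, 2, 1} sums to 16, so each
// entry is divided by 16 and the weights add up to 1. A kernel whose entries sum to 0
// (such as the edge detector {-1, -1, -1, -1, 8, -1, -1, -1, -1}) is instead divided
// by the sum of its positive entries (8).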
|
|
@@ -0,0 +1,7 @@
/*
Package imaging provides basic image processing functions (resize, rotate, crop, brightness/contrast adjustments, etc.).

All the image processing functions provided by the package accept any image type that implements the image.Image
interface as an input, and return a new image of *image.NRGBA type (32-bit RGBA colors, non-premultiplied alpha).
*/
package imaging
@@ -0,0 +1,169 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"math"
|
||||
)
|
||||
|
||||
func gaussianBlurKernel(x, sigma float64) float64 {
|
||||
return math.Exp(-(x*x)/(2*sigma*sigma)) / (sigma * math.Sqrt(2*math.Pi))
|
||||
}
|
||||
|
||||
// Blur produces a blurred version of the image using a Gaussian function.
|
||||
// Sigma parameter must be positive and indicates how much the image will be blurred.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Blur(srcImage, 3.5)
|
||||
//
|
||||
func Blur(img image.Image, sigma float64) *image.NRGBA {
|
||||
if sigma <= 0 {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
radius := int(math.Ceil(sigma * 3.0))
|
||||
kernel := make([]float64, radius+1)
|
||||
|
||||
for i := 0; i <= radius; i++ {
|
||||
kernel[i] = gaussianBlurKernel(float64(i), sigma)
|
||||
}
|
||||
|
||||
return blurVertical(blurHorizontal(img, kernel), kernel)
|
||||
}
|
||||
|
||||
func blurHorizontal(img image.Image, kernel []float64) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
radius := len(kernel) - 1
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
scanLineF := make([]float64, len(scanLine))
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
for i, v := range scanLine {
|
||||
scanLineF[i] = float64(v)
|
||||
}
|
||||
for x := 0; x < src.w; x++ {
|
||||
min := x - radius
|
||||
if min < 0 {
|
||||
min = 0
|
||||
}
|
||||
max := x + radius
|
||||
if max > src.w-1 {
|
||||
max = src.w - 1
|
||||
}
|
||||
var r, g, b, a, wsum float64
|
||||
for ix := min; ix <= max; ix++ {
|
||||
i := ix * 4
|
||||
weight := kernel[absint(x-ix)]
|
||||
wsum += weight
|
||||
s := scanLineF[i : i+4 : i+4]
|
||||
wa := s[3] * weight
|
||||
r += s[0] * wa
|
||||
g += s[1] * wa
|
||||
b += s[2] * wa
|
||||
a += wa
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := y*dst.Stride + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a / wsum)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
func blurVertical(img image.Image, kernel []float64) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
radius := len(kernel) - 1
|
||||
|
||||
parallel(0, src.w, func(xs <-chan int) {
|
||||
scanLine := make([]uint8, src.h*4)
|
||||
scanLineF := make([]float64, len(scanLine))
|
||||
for x := range xs {
|
||||
src.scan(x, 0, x+1, src.h, scanLine)
|
||||
for i, v := range scanLine {
|
||||
scanLineF[i] = float64(v)
|
||||
}
|
||||
for y := 0; y < src.h; y++ {
|
||||
min := y - radius
|
||||
if min < 0 {
|
||||
min = 0
|
||||
}
|
||||
max := y + radius
|
||||
if max > src.h-1 {
|
||||
max = src.h - 1
|
||||
}
|
||||
var r, g, b, a, wsum float64
|
||||
for iy := min; iy <= max; iy++ {
|
||||
i := iy * 4
|
||||
weight := kernel[absint(y-iy)]
|
||||
wsum += weight
|
||||
s := scanLineF[i : i+4 : i+4]
|
||||
wa := s[3] * weight
|
||||
r += s[0] * wa
|
||||
g += s[1] * wa
|
||||
b += s[2] * wa
|
||||
a += wa
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := y*dst.Stride + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a / wsum)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
// Sharpen produces a sharpened version of the image.
|
||||
// Sigma parameter must be positive and indicates how much the image will be sharpened.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Sharpen(srcImage, 3.5)
|
||||
//
|
||||
func Sharpen(img image.Image, sigma float64) *image.NRGBA {
|
||||
if sigma <= 0 {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
blurred := Blur(img, sigma)
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
j := y * dst.Stride
|
||||
for i := 0; i < src.w*4; i++ {
|
||||
val := int(scanLine[i])<<1 - int(blurred.Pix[j])
|
||||
if val < 0 {
|
||||
val = 0
|
||||
} else if val > 0xff {
|
||||
val = 0xff
|
||||
}
|
||||
dst.Pix[j] = uint8(val)
|
||||
j++
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
|
|
@@ -0,0 +1,3 @@
module github.com/disintegration/imaging

require golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8
@@ -0,0 +1,3 @@
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -0,0 +1,52 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Histogram returns a normalized histogram of an image.
|
||||
//
|
||||
// The resulting histogram is represented as an array of 256 floats, where
|
||||
// histogram[i] is the probability of a pixel having luminance i.
|
||||
func Histogram(img image.Image) [256]float64 {
|
||||
var mu sync.Mutex
|
||||
var histogram [256]float64
|
||||
var total float64
|
||||
|
||||
src := newScanner(img)
|
||||
if src.w == 0 || src.h == 0 {
|
||||
return histogram
|
||||
}
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
var tmpHistogram [256]float64
|
||||
var tmpTotal float64
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
i := 0
|
||||
for x := 0; x < src.w; x++ {
|
||||
s := scanLine[i : i+3 : i+3]
|
||||
r := s[0]
|
||||
g := s[1]
|
||||
b := s[2]
|
||||
y := 0.299*float32(r) + 0.587*float32(g) + 0.114*float32(b)
|
||||
tmpHistogram[int(y+0.5)]++
|
||||
tmpTotal++
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
mu.Lock()
|
||||
for i := 0; i < 256; i++ {
|
||||
histogram[i] += tmpHistogram[i]
|
||||
}
|
||||
total += tmpTotal
|
||||
mu.Unlock()
|
||||
})
|
||||
|
||||
for i := 0; i < 256; i++ {
|
||||
histogram[i] = histogram[i] / total
|
||||
}
|
||||
return histogram
|
||||
}
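// Example (added sketch; img and the 64-bin cutoff are arbitrary choices):
// estimating how dark an image is from its normalized histogram.
//
// hist := imaging.Histogram(img)
// var darkFraction float64
// for i := 0; i < 64; i++ {
//     darkFraction += hist[i] // share of pixels with luminance below 64
// }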
|
|
@@ -0,0 +1,444 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"image"
|
||||
"image/draw"
|
||||
"image/gif"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/image/bmp"
|
||||
"golang.org/x/image/tiff"
|
||||
)
|
||||
|
||||
type fileSystem interface {
|
||||
Create(string) (io.WriteCloser, error)
|
||||
Open(string) (io.ReadCloser, error)
|
||||
}
|
||||
|
||||
type localFS struct{}
|
||||
|
||||
func (localFS) Create(name string) (io.WriteCloser, error) { return os.Create(name) }
|
||||
func (localFS) Open(name string) (io.ReadCloser, error) { return os.Open(name) }
|
||||
|
||||
var fs fileSystem = localFS{}
|
||||
|
||||
type decodeConfig struct {
|
||||
autoOrientation bool
|
||||
}
|
||||
|
||||
var defaultDecodeConfig = decodeConfig{
|
||||
autoOrientation: false,
|
||||
}
|
||||
|
||||
// DecodeOption sets an optional parameter for the Decode and Open functions.
|
||||
type DecodeOption func(*decodeConfig)
|
||||
|
||||
// AutoOrientation returns a DecodeOption that sets the auto-orientation mode.
|
||||
// If auto-orientation is enabled, the image will be transformed after decoding
|
||||
// according to the EXIF orientation tag (if present). By default it's disabled.
|
||||
func AutoOrientation(enabled bool) DecodeOption {
|
||||
return func(c *decodeConfig) {
|
||||
c.autoOrientation = enabled
|
||||
}
|
||||
}
|
||||
|
||||
// Decode reads an image from r.
|
||||
func Decode(r io.Reader, opts ...DecodeOption) (image.Image, error) {
|
||||
cfg := defaultDecodeConfig
|
||||
for _, option := range opts {
|
||||
option(&cfg)
|
||||
}
|
||||
|
||||
if !cfg.autoOrientation {
|
||||
img, _, err := image.Decode(r)
|
||||
return img, err
|
||||
}
|
||||
|
||||
var orient orientation
|
||||
pr, pw := io.Pipe()
|
||||
r = io.TeeReader(r, pw)
|
||||
done := make(chan struct{})
|
||||
go func() {
|
||||
defer close(done)
|
||||
orient = readOrientation(pr)
|
||||
io.Copy(ioutil.Discard, pr)
|
||||
}()
|
||||
|
||||
img, _, err := image.Decode(r)
|
||||
pw.Close()
|
||||
<-done
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return fixOrientation(img, orient), nil
|
||||
}
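// Example (added sketch; r can be any io.Reader, e.g. an *os.File or an HTTP
// response body): decoding with EXIF auto-orientation enabled.
//
// img, err := imaging.Decode(r, imaging.AutoOrientation(true))
// if err != nil {
//     // handle the error
// }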
|
||||
|
||||
// Open loads an image from file.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// // Load an image from file.
|
||||
// img, err := imaging.Open("test.jpg")
|
||||
//
|
||||
// // Load an image and transform it depending on the EXIF orientation tag (if present).
|
||||
// img, err := imaging.Open("test.jpg", imaging.AutoOrientation(true))
|
||||
//
|
||||
func Open(filename string, opts ...DecodeOption) (image.Image, error) {
|
||||
file, err := fs.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
return Decode(file, opts...)
|
||||
}
|
||||
|
||||
// Format is an image file format.
|
||||
type Format int
|
||||
|
||||
// Image file formats.
|
||||
const (
|
||||
JPEG Format = iota
|
||||
PNG
|
||||
GIF
|
||||
TIFF
|
||||
BMP
|
||||
)
|
||||
|
||||
var formatExts = map[string]Format{
|
||||
"jpg": JPEG,
|
||||
"jpeg": JPEG,
|
||||
"png": PNG,
|
||||
"gif": GIF,
|
||||
"tif": TIFF,
|
||||
"tiff": TIFF,
|
||||
"bmp": BMP,
|
||||
}
|
||||
|
||||
var formatNames = map[Format]string{
|
||||
JPEG: "JPEG",
|
||||
PNG: "PNG",
|
||||
GIF: "GIF",
|
||||
TIFF: "TIFF",
|
||||
BMP: "BMP",
|
||||
}
|
||||
|
||||
func (f Format) String() string {
|
||||
return formatNames[f]
|
||||
}
|
||||
|
||||
// ErrUnsupportedFormat means the given image format is not supported.
|
||||
var ErrUnsupportedFormat = errors.New("imaging: unsupported image format")
|
||||
|
||||
// FormatFromExtension parses image format from filename extension:
|
||||
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
|
||||
func FormatFromExtension(ext string) (Format, error) {
|
||||
if f, ok := formatExts[strings.ToLower(strings.TrimPrefix(ext, "."))]; ok {
|
||||
return f, nil
|
||||
}
|
||||
return -1, ErrUnsupportedFormat
|
||||
}
|
||||
|
||||
// FormatFromFilename parses image format from filename:
|
||||
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
|
||||
func FormatFromFilename(filename string) (Format, error) {
|
||||
ext := filepath.Ext(filename)
|
||||
return FormatFromExtension(ext)
|
||||
}
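// Example (added sketch; the file name is arbitrary): both lookups below yield
// PNG, while an unknown extension returns ErrUnsupportedFormat.
//
// f1, _ := imaging.FormatFromExtension(".png")
// f2, _ := imaging.FormatFromFilename("photos/cat.PNG")
// _, err := imaging.FormatFromExtension(".webp") // err == imaging.ErrUnsupportedFormat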
|
||||
|
||||
type encodeConfig struct {
|
||||
jpegQuality int
|
||||
gifNumColors int
|
||||
gifQuantizer draw.Quantizer
|
||||
gifDrawer draw.Drawer
|
||||
pngCompressionLevel png.CompressionLevel
|
||||
}
|
||||
|
||||
var defaultEncodeConfig = encodeConfig{
|
||||
jpegQuality: 95,
|
||||
gifNumColors: 256,
|
||||
gifQuantizer: nil,
|
||||
gifDrawer: nil,
|
||||
pngCompressionLevel: png.DefaultCompression,
|
||||
}
|
||||
|
||||
// EncodeOption sets an optional parameter for the Encode and Save functions.
|
||||
type EncodeOption func(*encodeConfig)
|
||||
|
||||
// JPEGQuality returns an EncodeOption that sets the output JPEG quality.
|
||||
// Quality ranges from 1 to 100 inclusive, higher is better. Default is 95.
|
||||
func JPEGQuality(quality int) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.jpegQuality = quality
|
||||
}
|
||||
}
|
||||
|
||||
// GIFNumColors returns an EncodeOption that sets the maximum number of colors
|
||||
// used in the GIF-encoded image. It ranges from 1 to 256. Default is 256.
|
||||
func GIFNumColors(numColors int) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.gifNumColors = numColors
|
||||
}
|
||||
}
|
||||
|
||||
// GIFQuantizer returns an EncodeOption that sets the quantizer that is used to produce
|
||||
// a palette of the GIF-encoded image.
|
||||
func GIFQuantizer(quantizer draw.Quantizer) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.gifQuantizer = quantizer
|
||||
}
|
||||
}
|
||||
|
||||
// GIFDrawer returns an EncodeOption that sets the drawer that is used to convert
|
||||
// the source image to the desired palette of the GIF-encoded image.
|
||||
func GIFDrawer(drawer draw.Drawer) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.gifDrawer = drawer
|
||||
}
|
||||
}
|
||||
|
||||
// PNGCompressionLevel returns an EncodeOption that sets the compression level
|
||||
// of the PNG-encoded image. Default is png.DefaultCompression.
|
||||
func PNGCompressionLevel(level png.CompressionLevel) EncodeOption {
|
||||
return func(c *encodeConfig) {
|
||||
c.pngCompressionLevel = level
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes the image img to w in the specified format (JPEG, PNG, GIF, TIFF or BMP).
|
||||
func Encode(w io.Writer, img image.Image, format Format, opts ...EncodeOption) error {
|
||||
cfg := defaultEncodeConfig
|
||||
for _, option := range opts {
|
||||
option(&cfg)
|
||||
}
|
||||
|
||||
switch format {
|
||||
case JPEG:
|
||||
if nrgba, ok := img.(*image.NRGBA); ok && nrgba.Opaque() {
|
||||
rgba := &image.RGBA{
|
||||
Pix: nrgba.Pix,
|
||||
Stride: nrgba.Stride,
|
||||
Rect: nrgba.Rect,
|
||||
}
|
||||
return jpeg.Encode(w, rgba, &jpeg.Options{Quality: cfg.jpegQuality})
|
||||
}
|
||||
return jpeg.Encode(w, img, &jpeg.Options{Quality: cfg.jpegQuality})
|
||||
|
||||
case PNG:
|
||||
encoder := png.Encoder{CompressionLevel: cfg.pngCompressionLevel}
|
||||
return encoder.Encode(w, img)
|
||||
|
||||
case GIF:
|
||||
return gif.Encode(w, img, &gif.Options{
|
||||
NumColors: cfg.gifNumColors,
|
||||
Quantizer: cfg.gifQuantizer,
|
||||
Drawer: cfg.gifDrawer,
|
||||
})
|
||||
|
||||
case TIFF:
|
||||
return tiff.Encode(w, img, &tiff.Options{Compression: tiff.Deflate, Predictor: true})
|
||||
|
||||
case BMP:
|
||||
return bmp.Encode(w, img)
|
||||
}
|
||||
|
||||
return ErrUnsupportedFormat
|
||||
}
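// Example (added sketch; img is an assumed image.Image and the caller imports
// bytes and image/png): encoding into an in-memory buffer as PNG.
//
// var buf bytes.Buffer
// err := imaging.Encode(&buf, img, imaging.PNG, imaging.PNGCompressionLevel(png.BestCompression))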
|
||||
|
||||
// Save saves the image to file with the specified filename.
|
||||
// The format is determined from the filename extension:
|
||||
// "jpg" (or "jpeg"), "png", "gif", "tif" (or "tiff") and "bmp" are supported.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// // Save the image as PNG.
|
||||
// err := imaging.Save(img, "out.png")
|
||||
//
|
||||
// // Save the image as JPEG with optional quality parameter set to 80.
|
||||
// err := imaging.Save(img, "out.jpg", imaging.JPEGQuality(80))
|
||||
//
|
||||
func Save(img image.Image, filename string, opts ...EncodeOption) (err error) {
|
||||
f, err := FormatFromFilename(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
file, err := fs.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = Encode(file, img, f, opts...)
|
||||
errc := file.Close()
|
||||
if err == nil {
|
||||
err = errc
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// orientation is an EXIF flag that specifies the transformation
|
||||
// that should be applied to the image to display it correctly.
|
||||
type orientation int
|
||||
|
||||
const (
|
||||
orientationUnspecified = 0
|
||||
orientationNormal = 1
|
||||
orientationFlipH = 2
|
||||
orientationRotate180 = 3
|
||||
orientationFlipV = 4
|
||||
orientationTranspose = 5
|
||||
orientationRotate270 = 6
|
||||
orientationTransverse = 7
|
||||
orientationRotate90 = 8
|
||||
)
|
||||
|
||||
// readOrientation tries to read the orientation EXIF flag from image data in r.
|
||||
// If the EXIF data block is not found or the orientation flag is not found
|
||||
// or any other error occurs while reading the data, it returns the
|
||||
// orientationUnspecified (0) value.
|
||||
func readOrientation(r io.Reader) orientation {
|
||||
const (
|
||||
markerSOI = 0xffd8
|
||||
markerAPP1 = 0xffe1
|
||||
exifHeader = 0x45786966
|
||||
byteOrderBE = 0x4d4d
|
||||
byteOrderLE = 0x4949
|
||||
orientationTag = 0x0112
|
||||
)
|
||||
|
||||
// Check if JPEG SOI marker is present.
|
||||
var soi uint16
|
||||
if err := binary.Read(r, binary.BigEndian, &soi); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if soi != markerSOI {
|
||||
return orientationUnspecified // Missing JPEG SOI marker.
|
||||
}
|
||||
|
||||
// Find JPEG APP1 marker.
|
||||
for {
|
||||
var marker, size uint16
|
||||
if err := binary.Read(r, binary.BigEndian, &marker); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if err := binary.Read(r, binary.BigEndian, &size); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if marker>>8 != 0xff {
|
||||
return orientationUnspecified // Invalid JPEG marker.
|
||||
}
|
||||
if marker == markerAPP1 {
|
||||
break
|
||||
}
|
||||
if size < 2 {
|
||||
return orientationUnspecified // Invalid block size.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, int64(size-2)); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
}
|
||||
|
||||
// Check if EXIF header is present.
|
||||
var header uint32
|
||||
if err := binary.Read(r, binary.BigEndian, &header); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if header != exifHeader {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Read byte order information.
|
||||
var (
|
||||
byteOrderTag uint16
|
||||
byteOrder binary.ByteOrder
|
||||
)
|
||||
if err := binary.Read(r, binary.BigEndian, &byteOrderTag); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
switch byteOrderTag {
|
||||
case byteOrderBE:
|
||||
byteOrder = binary.BigEndian
|
||||
case byteOrderLE:
|
||||
byteOrder = binary.LittleEndian
|
||||
default:
|
||||
return orientationUnspecified // Invalid byte order flag.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Skip the EXIF offset.
|
||||
var offset uint32
|
||||
if err := binary.Read(r, byteOrder, &offset); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if offset < 8 {
|
||||
return orientationUnspecified // Invalid offset value.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, int64(offset-8)); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Read the number of tags.
|
||||
var numTags uint16
|
||||
if err := binary.Read(r, byteOrder, &numTags); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Find the orientation tag.
|
||||
for i := 0; i < int(numTags); i++ {
|
||||
var tag uint16
|
||||
if err := binary.Read(r, byteOrder, &tag); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if tag != orientationTag {
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 10); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
continue
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 6); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
var val uint16
|
||||
if err := binary.Read(r, byteOrder, &val); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if val < 1 || val > 8 {
|
||||
return orientationUnspecified // Invalid tag value.
|
||||
}
|
||||
return orientation(val)
|
||||
}
|
||||
return orientationUnspecified // Missing orientation tag.
|
||||
}
|
||||
|
||||
// fixOrientation applies a transform to img corresponding to the given orientation flag.
|
||||
func fixOrientation(img image.Image, o orientation) image.Image {
|
||||
switch o {
|
||||
case orientationNormal:
|
||||
case orientationFlipH:
|
||||
img = FlipH(img)
|
||||
case orientationFlipV:
|
||||
img = FlipV(img)
|
||||
case orientationRotate90:
|
||||
img = Rotate90(img)
|
||||
case orientationRotate180:
|
||||
img = Rotate180(img)
|
||||
case orientationRotate270:
|
||||
img = Rotate270(img)
|
||||
case orientationTranspose:
|
||||
img = Transpose(img)
|
||||
case orientationTransverse:
|
||||
img = Transverse(img)
|
||||
}
|
||||
return img
|
||||
}
|
|
@@ -0,0 +1,595 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"math"
|
||||
)
|
||||
|
||||
type indexWeight struct {
|
||||
index int
|
||||
weight float64
|
||||
}
|
||||
|
||||
func precomputeWeights(dstSize, srcSize int, filter ResampleFilter) [][]indexWeight {
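// For every destination coordinate this builds the list of contributing source
// indices and their filter weights, normalized so that the weights for each
// output sample sum to 1.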
|
||||
du := float64(srcSize) / float64(dstSize)
|
||||
scale := du
|
||||
if scale < 1.0 {
|
||||
scale = 1.0
|
||||
}
|
||||
ru := math.Ceil(scale * filter.Support)
|
||||
|
||||
out := make([][]indexWeight, dstSize)
|
||||
tmp := make([]indexWeight, 0, dstSize*int(ru+2)*2)
|
||||
|
||||
for v := 0; v < dstSize; v++ {
|
||||
fu := (float64(v)+0.5)*du - 0.5
|
||||
|
||||
begin := int(math.Ceil(fu - ru))
|
||||
if begin < 0 {
|
||||
begin = 0
|
||||
}
|
||||
end := int(math.Floor(fu + ru))
|
||||
if end > srcSize-1 {
|
||||
end = srcSize - 1
|
||||
}
|
||||
|
||||
var sum float64
|
||||
for u := begin; u <= end; u++ {
|
||||
w := filter.Kernel((float64(u) - fu) / scale)
|
||||
if w != 0 {
|
||||
sum += w
|
||||
tmp = append(tmp, indexWeight{index: u, weight: w})
|
||||
}
|
||||
}
|
||||
if sum != 0 {
|
||||
for i := range tmp {
|
||||
tmp[i].weight /= sum
|
||||
}
|
||||
}
|
||||
|
||||
out[v] = tmp
|
||||
tmp = tmp[len(tmp):]
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// Resize resizes the image to the specified width and height using the specified resampling
|
||||
// filter and returns the transformed image. If one of width or height is 0, the image aspect
|
||||
// ratio is preserved.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Resize(srcImage, 800, 600, imaging.Lanczos)
|
||||
//
|
||||
func Resize(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
if dstW < 0 || dstH < 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
if dstW == 0 && dstH == 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
srcW := img.Bounds().Dx()
|
||||
srcH := img.Bounds().Dy()
|
||||
if srcW <= 0 || srcH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
// If new width or height is 0 then preserve aspect ratio, minimum 1px.
|
||||
if dstW == 0 {
|
||||
tmpW := float64(dstH) * float64(srcW) / float64(srcH)
|
||||
dstW = int(math.Max(1.0, math.Floor(tmpW+0.5)))
|
||||
}
|
||||
if dstH == 0 {
|
||||
tmpH := float64(dstW) * float64(srcH) / float64(srcW)
|
||||
dstH = int(math.Max(1.0, math.Floor(tmpH+0.5)))
|
||||
}
|
||||
|
||||
if filter.Support <= 0 {
|
||||
// Nearest-neighbor special case.
|
||||
return resizeNearest(img, dstW, dstH)
|
||||
}
|
||||
|
||||
if srcW != dstW && srcH != dstH {
|
||||
return resizeVertical(resizeHorizontal(img, dstW, filter), dstH, filter)
|
||||
}
|
||||
if srcW != dstW {
|
||||
return resizeHorizontal(img, dstW, filter)
|
||||
}
|
||||
if srcH != dstH {
|
||||
return resizeVertical(img, dstH, filter)
|
||||
}
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
func resizeHorizontal(img image.Image, width int, filter ResampleFilter) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, width, src.h))
|
||||
weights := precomputeWeights(width, src.w, filter)
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
j0 := y * dst.Stride
|
||||
for x := range weights {
|
||||
var r, g, b, a float64
|
||||
for _, w := range weights[x] {
|
||||
i := w.index * 4
|
||||
s := scanLine[i : i+4 : i+4]
|
||||
aw := float64(s[3]) * w.weight
|
||||
r += float64(s[0]) * aw
|
||||
g += float64(s[1]) * aw
|
||||
b += float64(s[2]) * aw
|
||||
a += aw
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := j0 + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
func resizeVertical(img image.Image, height int, filter ResampleFilter) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, height))
|
||||
weights := precomputeWeights(height, src.h, filter)
|
||||
parallel(0, src.w, func(xs <-chan int) {
|
||||
scanLine := make([]uint8, src.h*4)
|
||||
for x := range xs {
|
||||
src.scan(x, 0, x+1, src.h, scanLine)
|
||||
for y := range weights {
|
||||
var r, g, b, a float64
|
||||
for _, w := range weights[y] {
|
||||
i := w.index * 4
|
||||
s := scanLine[i : i+4 : i+4]
|
||||
aw := float64(s[3]) * w.weight
|
||||
r += float64(s[0]) * aw
|
||||
g += float64(s[1]) * aw
|
||||
b += float64(s[2]) * aw
|
||||
a += aw
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
j := y*dst.Stride + x*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// resizeNearest is a fast nearest-neighbor resize, no filtering.
|
||||
func resizeNearest(img image.Image, width, height int) *image.NRGBA {
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, width, height))
|
||||
dx := float64(img.Bounds().Dx()) / float64(width)
|
||||
dy := float64(img.Bounds().Dy()) / float64(height)
|
||||
|
||||
if dx > 1 && dy > 1 {
|
||||
src := newScanner(img)
|
||||
parallel(0, height, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
srcY := int((float64(y) + 0.5) * dy)
|
||||
dstOff := y * dst.Stride
|
||||
for x := 0; x < width; x++ {
|
||||
srcX := int((float64(x) + 0.5) * dx)
|
||||
src.scan(srcX, srcY, srcX+1, srcY+1, dst.Pix[dstOff:dstOff+4])
|
||||
dstOff += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
} else {
|
||||
src := toNRGBA(img)
|
||||
parallel(0, height, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
srcY := int((float64(y) + 0.5) * dy)
|
||||
srcOff0 := srcY * src.Stride
|
||||
dstOff := y * dst.Stride
|
||||
for x := 0; x < width; x++ {
|
||||
srcX := int((float64(x) + 0.5) * dx)
|
||||
srcOff := srcOff0 + srcX*4
|
||||
copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
|
||||
dstOff += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
// Fit scales down the image using the specified resample filter to fit the specified
|
||||
// maximum width and height and returns the transformed image.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)
|
||||
//
|
||||
func Fit(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
maxW, maxH := width, height
|
||||
|
||||
if maxW <= 0 || maxH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
|
||||
if srcW <= 0 || srcH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
if srcW <= maxW && srcH <= maxH {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
srcAspectRatio := float64(srcW) / float64(srcH)
|
||||
maxAspectRatio := float64(maxW) / float64(maxH)
|
||||
|
||||
var newW, newH int
|
||||
if srcAspectRatio > maxAspectRatio {
|
||||
newW = maxW
|
||||
newH = int(float64(newW) / srcAspectRatio)
|
||||
} else {
|
||||
newH = maxH
|
||||
newW = int(float64(newH) * srcAspectRatio)
|
||||
}
|
||||
|
||||
return Resize(img, newW, newH, filter)
|
||||
}
|
||||
|
||||
// Fill creates an image with the specified dimensions and fills it with the scaled source image.
|
||||
// To achieve the correct aspect ratio without stretching, the source image will be cropped.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Fill(srcImage, 800, 600, imaging.Center, imaging.Lanczos)
|
||||
//
|
||||
func Fill(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
if dstW <= 0 || dstH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
|
||||
if srcW <= 0 || srcH <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
if srcW == dstW && srcH == dstH {
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
if srcW >= 100 && srcH >= 100 {
|
||||
return cropAndResize(img, dstW, dstH, anchor, filter)
|
||||
}
|
||||
return resizeAndCrop(img, dstW, dstH, anchor, filter)
|
||||
}
|
||||
|
||||
// cropAndResize crops the image to the smallest possible size that has the required aspect ratio using
|
||||
// the given anchor point, then scales it to the specified dimensions and returns the transformed image.
|
||||
//
|
||||
// This is generally faster than resizing first, but may result in inaccuracies when used on small source images.
|
||||
func cropAndResize(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
srcAspectRatio := float64(srcW) / float64(srcH)
|
||||
dstAspectRatio := float64(dstW) / float64(dstH)
|
||||
|
||||
var tmp *image.NRGBA
|
||||
if srcAspectRatio < dstAspectRatio {
|
||||
cropH := float64(srcW) * float64(dstH) / float64(dstW)
|
||||
tmp = CropAnchor(img, srcW, int(math.Max(1, cropH)+0.5), anchor)
|
||||
} else {
|
||||
cropW := float64(srcH) * float64(dstW) / float64(dstH)
|
||||
tmp = CropAnchor(img, int(math.Max(1, cropW)+0.5), srcH, anchor)
|
||||
}
|
||||
|
||||
return Resize(tmp, dstW, dstH, filter)
|
||||
}
|
||||
|
||||
// resizeAndCrop resizes the image to the smallest possible size that will cover the specified dimensions,
|
||||
// crops the resized image to the specified dimensions using the given anchor point and returns
|
||||
// the transformed image.
|
||||
func resizeAndCrop(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
srcBounds := img.Bounds()
|
||||
srcW := srcBounds.Dx()
|
||||
srcH := srcBounds.Dy()
|
||||
srcAspectRatio := float64(srcW) / float64(srcH)
|
||||
dstAspectRatio := float64(dstW) / float64(dstH)
|
||||
|
||||
var tmp *image.NRGBA
|
||||
if srcAspectRatio < dstAspectRatio {
|
||||
tmp = Resize(img, dstW, 0, filter)
|
||||
} else {
|
||||
tmp = Resize(img, 0, dstH, filter)
|
||||
}
|
||||
|
||||
return CropAnchor(tmp, dstW, dstH, anchor)
|
||||
}
|
||||
|
||||
// Thumbnail scales the image up or down using the specified resample filter, crops it
|
||||
// to the specified width and height and returns the transformed image.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Thumbnail(srcImage, 100, 100, imaging.Lanczos)
|
||||
//
|
||||
func Thumbnail(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
return Fill(img, width, height, Center, filter)
|
||||
}
|
||||
|
||||
// ResampleFilter specifies a resampling filter to be used for image resizing.
|
||||
//
|
||||
// General filter recommendations:
|
||||
//
|
||||
// - Lanczos
|
||||
// A high-quality resampling filter for photographic images yielding sharp results.
|
||||
//
|
||||
// - CatmullRom
|
||||
// A sharp cubic filter that is faster than Lanczos filter while providing similar results.
|
||||
//
|
||||
// - MitchellNetravali
|
||||
// A cubic filter that produces smoother results with less ringing artifacts than CatmullRom.
|
||||
//
|
||||
// - Linear
|
||||
// Bilinear resampling filter, produces a smooth output. Faster than cubic filters.
|
||||
//
|
||||
// - Box
|
||||
// Simple and fast averaging filter appropriate for downscaling.
|
||||
// When upscaling it's similar to NearestNeighbor.
|
||||
//
|
||||
// - NearestNeighbor
|
||||
// Fastest resampling filter, no antialiasing.
|
||||
//
|
||||
type ResampleFilter struct {
|
||||
Support float64
|
||||
Kernel func(float64) float64
|
||||
}
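// Example (added sketch; the filter below is not part of the package, just a
// demonstration of the two fields): a custom tent filter with a wider support.
//
// wideLinear := imaging.ResampleFilter{
//     Support: 2.0,
//     Kernel: func(x float64) float64 {
//         x = math.Abs(x)
//         if x < 2.0 {
//             return 1.0 - x/2.0
//         }
//         return 0
//     },
// }
// dstImage := imaging.Resize(srcImage, 800, 0, wideLinear)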
|
||||
|
||||
// NearestNeighbor is a nearest-neighbor filter (no anti-aliasing).
|
||||
var NearestNeighbor ResampleFilter
|
||||
|
||||
// Box filter (averaging pixels).
|
||||
var Box ResampleFilter
|
||||
|
||||
// Linear filter.
|
||||
var Linear ResampleFilter
|
||||
|
||||
// Hermite cubic spline filter (BC-spline; B=0; C=0).
|
||||
var Hermite ResampleFilter
|
||||
|
||||
// MitchellNetravali is Mitchell-Netravali cubic filter (BC-spline; B=1/3; C=1/3).
|
||||
var MitchellNetravali ResampleFilter
|
||||
|
||||
// CatmullRom is a Catmull-Rom - sharp cubic filter (BC-spline; B=0; C=0.5).
|
||||
var CatmullRom ResampleFilter
|
||||
|
||||
// BSpline is a smooth cubic filter (BC-spline; B=1; C=0).
|
||||
var BSpline ResampleFilter
|
||||
|
||||
// Gaussian is a Gaussian blurring filter.
|
||||
var Gaussian ResampleFilter
|
||||
|
||||
// Bartlett is a Bartlett-windowed sinc filter (3 lobes).
|
||||
var Bartlett ResampleFilter
|
||||
|
||||
// Lanczos filter (3 lobes).
|
||||
var Lanczos ResampleFilter
|
||||
|
||||
// Hann is a Hann-windowed sinc filter (3 lobes).
|
||||
var Hann ResampleFilter
|
||||
|
||||
// Hamming is a Hamming-windowed sinc filter (3 lobes).
|
||||
var Hamming ResampleFilter
|
||||
|
||||
// Blackman is a Blackman-windowed sinc filter (3 lobes).
|
||||
var Blackman ResampleFilter
|
||||
|
||||
// Welch is a Welch-windowed sinc filter (parabolic window, 3 lobes).
|
||||
var Welch ResampleFilter
|
||||
|
||||
// Cosine is a Cosine-windowed sinc filter (3 lobes).
|
||||
var Cosine ResampleFilter
|
||||
|
||||
func bcspline(x, b, c float64) float64 {
|
||||
var y float64
|
||||
x = math.Abs(x)
|
||||
if x < 1.0 {
|
||||
y = ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6
|
||||
} else if x < 2.0 {
|
||||
y = ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6
|
||||
}
|
||||
return y
|
||||
}
|
||||
|
||||
func sinc(x float64) float64 {
|
||||
if x == 0 {
|
||||
return 1
|
||||
}
|
||||
return math.Sin(math.Pi*x) / (math.Pi * x)
|
||||
}
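// Worked values (added note): sinc(0) = 1 and sinc(n) = 0 for every non-zero integer n,
// so a windowed-sinc kernel centered exactly on a source pixel weights only that pixel.
// For the BC-splines, bcspline(0, b, c) = (6-2b)/6, e.g. 8/9 for Mitchell-Netravali (b = 1/3).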
|
||||
|
||||
func init() {
|
||||
NearestNeighbor = ResampleFilter{
|
||||
Support: 0.0, // special case - not applying the filter
|
||||
}
|
||||
|
||||
Box = ResampleFilter{
|
||||
Support: 0.5,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x <= 0.5 {
|
||||
return 1.0
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Linear = ResampleFilter{
|
||||
Support: 1.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 1.0 {
|
||||
return 1.0 - x
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Hermite = ResampleFilter{
|
||||
Support: 1.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 1.0 {
|
||||
return bcspline(x, 0.0, 0.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
MitchellNetravali = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return bcspline(x, 1.0/3.0, 1.0/3.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
CatmullRom = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return bcspline(x, 0.0, 0.5)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
BSpline = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return bcspline(x, 1.0, 0.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Gaussian = ResampleFilter{
|
||||
Support: 2.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 2.0 {
|
||||
return math.Exp(-2 * x * x)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Bartlett = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (3.0 - x) / 3.0
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Lanczos = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * sinc(x/3.0)
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Hann = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (0.5 + 0.5*math.Cos(math.Pi*x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Hamming = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (0.54 + 0.46*math.Cos(math.Pi*x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Blackman = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (0.42 - 0.5*math.Cos(math.Pi*x/3.0+math.Pi) + 0.08*math.Cos(2.0*math.Pi*x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Welch = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * (1.0 - (x * x / 9.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
|
||||
Cosine = ResampleFilter{
|
||||
Support: 3.0,
|
||||
Kernel: func(x float64) float64 {
|
||||
x = math.Abs(x)
|
||||
if x < 3.0 {
|
||||
return sinc(x) * math.Cos((math.Pi/2.0)*(x/3.0))
|
||||
}
|
||||
return 0
|
||||
},
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,285 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
type scanner struct {
|
||||
image image.Image
|
||||
w, h int
|
||||
palette []color.NRGBA
|
||||
}
|
||||
|
||||
func newScanner(img image.Image) *scanner {
|
||||
s := &scanner{
|
||||
image: img,
|
||||
w: img.Bounds().Dx(),
|
||||
h: img.Bounds().Dy(),
|
||||
}
|
||||
if img, ok := img.(*image.Paletted); ok {
|
||||
s.palette = make([]color.NRGBA, len(img.Palette))
|
||||
for i := 0; i < len(img.Palette); i++ {
|
||||
s.palette[i] = color.NRGBAModel.Convert(img.Palette[i]).(color.NRGBA)
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// scan scans the given rectangular region of the image into dst.
|
||||
func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
|
||||
switch img := s.image.(type) {
|
||||
case *image.NRGBA:
|
||||
size := (x2 - x1) * 4
|
||||
j := 0
|
||||
i := y1*img.Stride + x1*4
|
||||
if size == 4 {
|
||||
for y := y1; y < y2; y++ {
|
||||
d := dst[j : j+4 : j+4]
|
||||
s := img.Pix[i : i+4 : i+4]
|
||||
d[0] = s[0]
|
||||
d[1] = s[1]
|
||||
d[2] = s[2]
|
||||
d[3] = s[3]
|
||||
j += size
|
||||
i += img.Stride
|
||||
}
|
||||
} else {
|
||||
for y := y1; y < y2; y++ {
|
||||
copy(dst[j:j+size], img.Pix[i:i+size])
|
||||
j += size
|
||||
i += img.Stride
|
||||
}
|
||||
}
|
||||
|
||||
case *image.NRGBA64:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*8
|
||||
for x := x1; x < x2; x++ {
|
||||
s := img.Pix[i : i+8 : i+8]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = s[0]
|
||||
d[1] = s[2]
|
||||
d[2] = s[4]
|
||||
d[3] = s[6]
|
||||
j += 4
|
||||
i += 8
|
||||
}
|
||||
}
|
||||
|
||||
case *image.RGBA:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*4
|
||||
for x := x1; x < x2; x++ {
|
||||
d := dst[j : j+4 : j+4]
|
||||
a := img.Pix[i+3]
|
||||
switch a {
|
||||
case 0:
|
||||
d[0] = 0
|
||||
d[1] = 0
|
||||
d[2] = 0
|
||||
d[3] = a
|
||||
case 0xff:
|
||||
s := img.Pix[i : i+4 : i+4]
|
||||
d[0] = s[0]
|
||||
d[1] = s[1]
|
||||
d[2] = s[2]
|
||||
d[3] = a
|
||||
default:
|
||||
s := img.Pix[i : i+4 : i+4]
|
||||
r16 := uint16(s[0])
|
||||
g16 := uint16(s[1])
|
||||
b16 := uint16(s[2])
|
||||
a16 := uint16(a)
|
||||
d[0] = uint8(r16 * 0xff / a16)
|
||||
d[1] = uint8(g16 * 0xff / a16)
|
||||
d[2] = uint8(b16 * 0xff / a16)
|
||||
d[3] = a
|
||||
}
|
||||
j += 4
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
|
||||
case *image.RGBA64:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*8
|
||||
for x := x1; x < x2; x++ {
|
||||
s := img.Pix[i : i+8 : i+8]
|
||||
d := dst[j : j+4 : j+4]
|
||||
a := s[6]
|
||||
switch a {
|
||||
case 0:
|
||||
d[0] = 0
|
||||
d[1] = 0
|
||||
d[2] = 0
|
||||
case 0xff:
|
||||
d[0] = s[0]
|
||||
d[1] = s[2]
|
||||
d[2] = s[4]
|
||||
default:
|
||||
r32 := uint32(s[0])<<8 | uint32(s[1])
|
||||
g32 := uint32(s[2])<<8 | uint32(s[3])
|
||||
b32 := uint32(s[4])<<8 | uint32(s[5])
|
||||
a32 := uint32(s[6])<<8 | uint32(s[7])
|
||||
d[0] = uint8((r32 * 0xffff / a32) >> 8)
|
||||
d[1] = uint8((g32 * 0xffff / a32) >> 8)
|
||||
d[2] = uint8((b32 * 0xffff / a32) >> 8)
|
||||
}
|
||||
d[3] = a
|
||||
j += 4
|
||||
i += 8
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Gray:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1
|
||||
for x := x1; x < x2; x++ {
|
||||
c := img.Pix[i]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = c
|
||||
d[1] = c
|
||||
d[2] = c
|
||||
d[3] = 0xff
|
||||
j += 4
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Gray16:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*2
|
||||
for x := x1; x < x2; x++ {
|
||||
c := img.Pix[i]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = c
|
||||
d[1] = c
|
||||
d[2] = c
|
||||
d[3] = 0xff
|
||||
j += 4
|
||||
i += 2
|
||||
}
|
||||
}
|
||||
|
||||
case *image.YCbCr:
|
||||
j := 0
|
||||
x1 += img.Rect.Min.X
|
||||
x2 += img.Rect.Min.X
|
||||
y1 += img.Rect.Min.Y
|
||||
y2 += img.Rect.Min.Y
|
||||
|
||||
hy := img.Rect.Min.Y / 2
|
||||
hx := img.Rect.Min.X / 2
|
||||
for y := y1; y < y2; y++ {
|
||||
iy := (y-img.Rect.Min.Y)*img.YStride + (x1 - img.Rect.Min.X)
|
||||
|
||||
var yBase int
|
||||
switch img.SubsampleRatio {
|
||||
case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio422:
|
||||
yBase = (y - img.Rect.Min.Y) * img.CStride
|
||||
case image.YCbCrSubsampleRatio420, image.YCbCrSubsampleRatio440:
|
||||
yBase = (y/2 - hy) * img.CStride
|
||||
}
|
||||
|
||||
for x := x1; x < x2; x++ {
|
||||
var ic int
|
||||
switch img.SubsampleRatio {
|
||||
case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio440:
|
||||
ic = yBase + (x - img.Rect.Min.X)
|
||||
case image.YCbCrSubsampleRatio422, image.YCbCrSubsampleRatio420:
|
||||
ic = yBase + (x/2 - hx)
|
||||
default:
|
||||
ic = img.COffset(x, y)
|
||||
}
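// The fixed-point factors used below are the ITU-R BT.601 YCbCr-to-RGB coefficients
// scaled by 2^16 (91881 ~ 1.402, 22554 ~ 0.34414, 46802 ~ 0.71414, 116130 ~ 1.772),
// the same constants used by the standard library's image/color conversion.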
|
||||
|
||||
yy1 := int32(img.Y[iy]) * 0x10101
|
||||
cb1 := int32(img.Cb[ic]) - 128
|
||||
cr1 := int32(img.Cr[ic]) - 128
|
||||
|
||||
r := yy1 + 91881*cr1
|
||||
if uint32(r)&0xff000000 == 0 {
|
||||
r >>= 16
|
||||
} else {
|
||||
r = ^(r >> 31)
|
||||
}
|
||||
|
||||
g := yy1 - 22554*cb1 - 46802*cr1
|
||||
if uint32(g)&0xff000000 == 0 {
|
||||
g >>= 16
|
||||
} else {
|
||||
g = ^(g >> 31)
|
||||
}
|
||||
|
||||
b := yy1 + 116130*cb1
|
||||
if uint32(b)&0xff000000 == 0 {
|
||||
b >>= 16
|
||||
} else {
|
||||
b = ^(b >> 31)
|
||||
}
|
||||
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = uint8(r)
|
||||
d[1] = uint8(g)
|
||||
d[2] = uint8(b)
|
||||
d[3] = 0xff
|
||||
|
||||
iy++
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Paletted:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1
|
||||
for x := x1; x < x2; x++ {
|
||||
c := s.palette[img.Pix[i]]
|
||||
d := dst[j : j+4 : j+4]
|
||||
d[0] = c.R
|
||||
d[1] = c.G
|
||||
d[2] = c.B
|
||||
d[3] = c.A
|
||||
j += 4
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
j := 0
|
||||
b := s.image.Bounds()
|
||||
x1 += b.Min.X
|
||||
x2 += b.Min.X
|
||||
y1 += b.Min.Y
|
||||
y2 += b.Min.Y
|
||||
for y := y1; y < y2; y++ {
|
||||
for x := x1; x < x2; x++ {
|
||||
r16, g16, b16, a16 := s.image.At(x, y).RGBA()
|
||||
d := dst[j : j+4 : j+4]
|
||||
switch a16 {
|
||||
case 0xffff:
|
||||
d[0] = uint8(r16 >> 8)
|
||||
d[1] = uint8(g16 >> 8)
|
||||
d[2] = uint8(b16 >> 8)
|
||||
d[3] = 0xff
|
||||
case 0:
|
||||
d[0] = 0
|
||||
d[1] = 0
|
||||
d[2] = 0
|
||||
d[3] = 0
|
||||
default:
|
||||
d[0] = uint8(((r16 * 0xffff) / a16) >> 8)
|
||||
d[1] = uint8(((g16 * 0xffff) / a16) >> 8)
|
||||
d[2] = uint8(((b16 * 0xffff) / a16) >> 8)
|
||||
d[3] = uint8(a16 >> 8)
|
||||
}
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,249 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image"
|
||||
"image/color"
|
||||
"math"
|
||||
)
|
||||
|
||||
// New creates a new image with the specified width and height, and fills it with the specified color.
|
||||
func New(width, height int, fillColor color.Color) *image.NRGBA {
|
||||
if width <= 0 || height <= 0 {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
|
||||
c := color.NRGBAModel.Convert(fillColor).(color.NRGBA)
|
||||
if (c == color.NRGBA{0, 0, 0, 0}) {
|
||||
return image.NewNRGBA(image.Rect(0, 0, width, height))
|
||||
}
|
||||
|
||||
return &image.NRGBA{
|
||||
Pix: bytes.Repeat([]byte{c.R, c.G, c.B, c.A}, width*height),
|
||||
Stride: 4 * width,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}
|
||||
}
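// Example (added sketch; the size and fill color are arbitrary): creating an
// opaque white 1024x768 background.
//
// background := imaging.New(1024, 768, color.NRGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff})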
|
||||
|
||||
// Clone returns a copy of the given image.
|
||||
func Clone(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
size := src.w * 4
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+size])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Anchor is the anchor point for image alignment.
|
||||
type Anchor int
|
||||
|
||||
// Anchor point positions.
|
||||
const (
|
||||
Center Anchor = iota
|
||||
TopLeft
|
||||
Top
|
||||
TopRight
|
||||
Left
|
||||
Right
|
||||
BottomLeft
|
||||
Bottom
|
||||
BottomRight
|
||||
)
|
||||
|
||||
func anchorPt(b image.Rectangle, w, h int, anchor Anchor) image.Point {
|
||||
var x, y int
|
||||
switch anchor {
|
||||
case TopLeft:
|
||||
x = b.Min.X
|
||||
y = b.Min.Y
|
||||
case Top:
|
||||
x = b.Min.X + (b.Dx()-w)/2
|
||||
y = b.Min.Y
|
||||
case TopRight:
|
||||
x = b.Max.X - w
|
||||
y = b.Min.Y
|
||||
case Left:
|
||||
x = b.Min.X
|
||||
y = b.Min.Y + (b.Dy()-h)/2
|
||||
case Right:
|
||||
x = b.Max.X - w
|
||||
y = b.Min.Y + (b.Dy()-h)/2
|
||||
case BottomLeft:
|
||||
x = b.Min.X
|
||||
y = b.Max.Y - h
|
||||
case Bottom:
|
||||
x = b.Min.X + (b.Dx()-w)/2
|
||||
y = b.Max.Y - h
|
||||
case BottomRight:
|
||||
x = b.Max.X - w
|
||||
y = b.Max.Y - h
|
||||
default:
|
||||
x = b.Min.X + (b.Dx()-w)/2
|
||||
y = b.Min.Y + (b.Dy()-h)/2
|
||||
}
|
||||
return image.Pt(x, y)
|
||||
}
|
||||
|
||||
// Crop cuts out a rectangular region with the specified bounds
|
||||
// from the image and returns the cropped image.
|
||||
func Crop(img image.Image, rect image.Rectangle) *image.NRGBA {
|
||||
r := rect.Intersect(img.Bounds()).Sub(img.Bounds().Min)
|
||||
if r.Empty() {
|
||||
return &image.NRGBA{}
|
||||
}
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, r.Dx(), r.Dy()))
|
||||
rowSize := r.Dx() * 4
|
||||
parallel(r.Min.Y, r.Max.Y, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
i := (y - r.Min.Y) * dst.Stride
|
||||
src.scan(r.Min.X, y, r.Max.X, y+1, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// CropAnchor cuts out a rectangular region with the specified size
|
||||
// from the image using the specified anchor point and returns the cropped image.
|
||||
func CropAnchor(img image.Image, width, height int, anchor Anchor) *image.NRGBA {
|
||||
srcBounds := img.Bounds()
|
||||
pt := anchorPt(srcBounds, width, height, anchor)
|
||||
r := image.Rect(0, 0, width, height).Add(pt)
|
||||
b := srcBounds.Intersect(r)
|
||||
return Crop(img, b)
|
||||
}
|
||||
|
||||
// CropCenter cuts out a rectangular region with the specified size
|
||||
// from the center of the image and returns the cropped image.
|
||||
func CropCenter(img image.Image, width, height int) *image.NRGBA {
|
||||
return CropAnchor(img, width, height, Center)
|
||||
}
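// Example (added sketch; the sizes are arbitrary): taking a centered 300x300
// crop, or the same crop anchored at the top-left corner.
//
// centerCrop := imaging.CropCenter(img, 300, 300)
// cornerCrop := imaging.CropAnchor(img, 300, 300, imaging.TopLeft)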
|
||||
|
||||
// Paste pastes the img image to the background image at the specified position and returns the combined image.
|
||||
func Paste(background, img image.Image, pos image.Point) *image.NRGBA {
|
||||
dst := Clone(background)
|
||||
pos = pos.Sub(background.Bounds().Min)
|
||||
pasteRect := image.Rectangle{Min: pos, Max: pos.Add(img.Bounds().Size())}
|
||||
interRect := pasteRect.Intersect(dst.Bounds())
|
||||
if interRect.Empty() {
|
||||
return dst
|
||||
}
|
||||
src := newScanner(img)
|
||||
parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
x1 := interRect.Min.X - pasteRect.Min.X
|
||||
x2 := interRect.Max.X - pasteRect.Min.X
|
||||
y1 := y - pasteRect.Min.Y
|
||||
y2 := y1 + 1
|
||||
i1 := y*dst.Stride + interRect.Min.X*4
|
||||
i2 := i1 + interRect.Dx()*4
|
||||
src.scan(x1, y1, x2, y2, dst.Pix[i1:i2])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// PasteCenter pastes the img image to the center of the background image and returns the combined image.
|
||||
func PasteCenter(background, img image.Image) *image.NRGBA {
|
||||
bgBounds := background.Bounds()
|
||||
bgW := bgBounds.Dx()
|
||||
bgH := bgBounds.Dy()
|
||||
bgMinX := bgBounds.Min.X
|
||||
bgMinY := bgBounds.Min.Y
|
||||
|
||||
centerX := bgMinX + bgW/2
|
||||
centerY := bgMinY + bgH/2
|
||||
|
||||
x0 := centerX - img.Bounds().Dx()/2
|
||||
y0 := centerY - img.Bounds().Dy()/2
|
||||
|
||||
return Paste(background, img, image.Pt(x0, y0))
|
||||
}
|
||||
|
||||
// Overlay draws the img image over the background image at the given position
|
||||
// and returns the combined image. Opacity parameter is the opacity of the img
|
||||
// image layer used to compose the images; it must be between 0.0 and 1.0.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// // Draw spriteImage over backgroundImage at the given position (x=50, y=50).
|
||||
// dstImage := imaging.Overlay(backgroundImage, spriteImage, image.Pt(50, 50), 1.0)
|
||||
//
|
||||
// // Blend two opaque images of the same size.
|
||||
// dstImage := imaging.Overlay(imageOne, imageTwo, image.Pt(0, 0), 0.5)
|
||||
//
|
||||
func Overlay(background, img image.Image, pos image.Point, opacity float64) *image.NRGBA {
|
||||
opacity = math.Min(math.Max(opacity, 0.0), 1.0) // Ensure 0.0 <= opacity <= 1.0.
|
||||
dst := Clone(background)
|
||||
pos = pos.Sub(background.Bounds().Min)
|
||||
pasteRect := image.Rectangle{Min: pos, Max: pos.Add(img.Bounds().Size())}
|
||||
interRect := pasteRect.Intersect(dst.Bounds())
|
||||
if interRect.Empty() {
|
||||
return dst
|
||||
}
|
||||
src := newScanner(img)
|
||||
parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
|
||||
scanLine := make([]uint8, interRect.Dx()*4)
|
||||
for y := range ys {
|
||||
x1 := interRect.Min.X - pasteRect.Min.X
|
||||
x2 := interRect.Max.X - pasteRect.Min.X
|
||||
y1 := y - pasteRect.Min.Y
|
||||
y2 := y1 + 1
|
||||
src.scan(x1, y1, x2, y2, scanLine)
|
||||
i := y*dst.Stride + interRect.Min.X*4
|
||||
j := 0
|
||||
for x := interRect.Min.X; x < interRect.Max.X; x++ {
|
||||
d := dst.Pix[i : i+4 : i+4]
|
||||
r1 := float64(d[0])
|
||||
g1 := float64(d[1])
|
||||
b1 := float64(d[2])
|
||||
a1 := float64(d[3])
|
||||
|
||||
s := scanLine[j : j+4 : j+4]
|
||||
r2 := float64(s[0])
|
||||
g2 := float64(s[1])
|
||||
b2 := float64(s[2])
|
||||
a2 := float64(s[3])
|
||||
|
||||
coef2 := opacity * a2 / 255
|
||||
coef1 := (1 - coef2) * a1 / 255
|
||||
coefSum := coef1 + coef2
|
||||
coef1 /= coefSum
|
||||
coef2 /= coefSum
|
||||
|
||||
d[0] = uint8(r1*coef1 + r2*coef2)
|
||||
d[1] = uint8(g1*coef1 + g2*coef2)
|
||||
d[2] = uint8(b1*coef1 + b2*coef2)
|
||||
d[3] = uint8(math.Min(a1+a2*opacity*(255-a1)/255, 255))
|
||||
|
||||
i += 4
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// OverlayCenter overlays the img image to the center of the background image and
|
||||
// returns the combined image. Opacity parameter is the opacity of the img
|
||||
// image layer used to compose the images; it must be between 0.0 and 1.0.
|
||||
func OverlayCenter(background, img image.Image, opacity float64) *image.NRGBA {
|
||||
bgBounds := background.Bounds()
|
||||
bgW := bgBounds.Dx()
|
||||
bgH := bgBounds.Dy()
|
||||
bgMinX := bgBounds.Min.X
|
||||
bgMinY := bgBounds.Min.Y
|
||||
|
||||
centerX := bgMinX + bgW/2
|
||||
centerY := bgMinY + bgH/2
|
||||
|
||||
x0 := centerX - img.Bounds().Dx()/2
|
||||
y0 := centerY - img.Bounds().Dy()/2
|
||||
|
||||
return Overlay(background, img, image.Point{x0, y0}, opacity)
|
||||
}
|
|
@@ -0,0 +1,268 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
"math"
|
||||
)
|
||||
|
||||
// FlipH flips the image horizontally (from left to right) and returns the transformed image.
|
||||
func FlipH(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.w
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstY
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
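// Example (added sketch; img is an assumed image.Image): mirroring an image and
// rotating it a quarter turn counter-clockwise.
//
// mirrored := imaging.FlipH(img)
// rotated := imaging.Rotate90(img)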
|
||||
|
||||
// FlipV flips the image vertically (from top to bottom) and returns the transformed image.
|
||||
func FlipV(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.w
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstH - dstY - 1
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Transpose flips the image horizontally and rotates 90 degrees counter-clockwise.
|
||||
func Transpose(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstY
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Transverse flips the image vertically and rotates 90 degrees counter-clockwise.
|
||||
func Transverse(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstH - dstY - 1
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate90 rotates the image 90 degrees counter-clockwise and returns the transformed image.
|
||||
func Rotate90(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstH - dstY - 1
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate180 rotates the image 180 degrees counter-clockwise and returns the transformed image.
|
||||
func Rotate180(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.w
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstH - dstY - 1
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate270 rotates the image 270 degrees counter-clockwise and returns the transformed image.
|
||||
func Rotate270(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dstW := src.h
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstY
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
return dst
|
||||
}
|
||||
|
||||
// Rotate rotates an image by the given angle counter-clockwise.
|
||||
// The angle parameter is the rotation angle in degrees.
|
||||
// The bgColor parameter specifies the color of the uncovered zone after the rotation.
|
||||
func Rotate(img image.Image, angle float64, bgColor color.Color) *image.NRGBA {
|
||||
angle = angle - math.Floor(angle/360)*360
|
||||
|
||||
switch angle {
|
||||
case 0:
|
||||
return Clone(img)
|
||||
case 90:
|
||||
return Rotate90(img)
|
||||
case 180:
|
||||
return Rotate180(img)
|
||||
case 270:
|
||||
return Rotate270(img)
|
||||
}
|
||||
|
||||
src := toNRGBA(img)
|
||||
srcW := src.Bounds().Max.X
|
||||
srcH := src.Bounds().Max.Y
|
||||
dstW, dstH := rotatedSize(srcW, srcH, angle)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
|
||||
if dstW <= 0 || dstH <= 0 {
|
||||
return dst
|
||||
}
|
||||
|
||||
srcXOff := float64(srcW)/2 - 0.5
|
||||
srcYOff := float64(srcH)/2 - 0.5
|
||||
dstXOff := float64(dstW)/2 - 0.5
|
||||
dstYOff := float64(dstH)/2 - 0.5
|
||||
|
||||
bgColorNRGBA := color.NRGBAModel.Convert(bgColor).(color.NRGBA)
|
||||
sin, cos := math.Sincos(math.Pi * angle / 180)
|
||||
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
for dstX := 0; dstX < dstW; dstX++ {
|
||||
xf, yf := rotatePoint(float64(dstX)-dstXOff, float64(dstY)-dstYOff, sin, cos)
|
||||
xf, yf = xf+srcXOff, yf+srcYOff
|
||||
interpolatePoint(dst, dstX, dstY, src, xf, yf, bgColorNRGBA)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return dst
|
||||
}
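
// Example usage (an illustrative sketch; srcImage and the 30-degree angle are
// assumed values, not part of the original code):
//
//	rotated := Rotate(srcImage, 30, color.Transparent)
//
// Angles that are exact multiples of 90 take the fast paths above; any other
// angle is rendered with bilinear interpolation, and the uncovered corners are
// filled with the given background color.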
|
||||
|
||||
func rotatePoint(x, y, sin, cos float64) (float64, float64) {
|
||||
return x*cos - y*sin, x*sin + y*cos
|
||||
}
|
||||
|
||||
func rotatedSize(w, h int, angle float64) (int, int) {
|
||||
if w <= 0 || h <= 0 {
|
||||
return 0, 0
|
||||
}
|
||||
|
||||
sin, cos := math.Sincos(math.Pi * angle / 180)
|
||||
x1, y1 := rotatePoint(float64(w-1), 0, sin, cos)
|
||||
x2, y2 := rotatePoint(float64(w-1), float64(h-1), sin, cos)
|
||||
x3, y3 := rotatePoint(0, float64(h-1), sin, cos)
|
||||
|
||||
minx := math.Min(x1, math.Min(x2, math.Min(x3, 0)))
|
||||
maxx := math.Max(x1, math.Max(x2, math.Max(x3, 0)))
|
||||
miny := math.Min(y1, math.Min(y2, math.Min(y3, 0)))
|
||||
maxy := math.Max(y1, math.Max(y2, math.Max(y3, 0)))
|
||||
|
||||
neww := maxx - minx + 1
|
||||
if neww-math.Floor(neww) > 0.1 {
|
||||
neww++
|
||||
}
|
||||
newh := maxy - miny + 1
|
||||
if newh-math.Floor(newh) > 0.1 {
|
||||
newh++
|
||||
}
|
||||
|
||||
return int(neww), int(newh)
|
||||
}
|
||||
|
||||
func interpolatePoint(dst *image.NRGBA, dstX, dstY int, src *image.NRGBA, xf, yf float64, bgColor color.NRGBA) {
|
||||
j := dstY*dst.Stride + dstX*4
|
||||
d := dst.Pix[j : j+4 : j+4]
|
||||
|
||||
x0 := int(math.Floor(xf))
|
||||
y0 := int(math.Floor(yf))
|
||||
bounds := src.Bounds()
|
||||
if !image.Pt(x0, y0).In(image.Rect(bounds.Min.X-1, bounds.Min.Y-1, bounds.Max.X, bounds.Max.Y)) {
|
||||
d[0] = bgColor.R
|
||||
d[1] = bgColor.G
|
||||
d[2] = bgColor.B
|
||||
d[3] = bgColor.A
|
||||
return
|
||||
}
|
||||
|
||||
xq := xf - float64(x0)
|
||||
yq := yf - float64(y0)
|
||||
points := [4]image.Point{
|
||||
{x0, y0},
|
||||
{x0 + 1, y0},
|
||||
{x0, y0 + 1},
|
||||
{x0 + 1, y0 + 1},
|
||||
}
|
||||
weights := [4]float64{
|
||||
(1 - xq) * (1 - yq),
|
||||
xq * (1 - yq),
|
||||
(1 - xq) * yq,
|
||||
xq * yq,
|
||||
}
|
||||
|
||||
var r, g, b, a float64
|
||||
for i := 0; i < 4; i++ {
|
||||
p := points[i]
|
||||
w := weights[i]
|
||||
if p.In(bounds) {
|
||||
i := p.Y*src.Stride + p.X*4
|
||||
s := src.Pix[i : i+4 : i+4]
|
||||
wa := float64(s[3]) * w
|
||||
r += float64(s[0]) * wa
|
||||
g += float64(s[1]) * wa
|
||||
b += float64(s[2]) * wa
|
||||
a += wa
|
||||
} else {
|
||||
wa := float64(bgColor.A) * w
|
||||
r += float64(bgColor.R) * wa
|
||||
g += float64(bgColor.G) * wa
|
||||
b += float64(bgColor.B) * wa
|
||||
a += wa
|
||||
}
|
||||
}
|
||||
if a != 0 {
|
||||
aInv := 1 / a
|
||||
d[0] = clamp(r * aInv)
|
||||
d[1] = clamp(g * aInv)
|
||||
d[2] = clamp(b * aInv)
|
||||
d[3] = clamp(a)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,167 @@
|
|||
package imaging
|
||||
|
||||
import (
|
||||
"image"
|
||||
"math"
|
||||
"runtime"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// parallel processes the data in separate goroutines.
|
||||
func parallel(start, stop int, fn func(<-chan int)) {
|
||||
count := stop - start
|
||||
if count < 1 {
|
||||
return
|
||||
}
|
||||
|
||||
procs := runtime.GOMAXPROCS(0)
|
||||
if procs > count {
|
||||
procs = count
|
||||
}
|
||||
|
||||
c := make(chan int, count)
|
||||
for i := start; i < stop; i++ {
|
||||
c <- i
|
||||
}
|
||||
close(c)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for i := 0; i < procs; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
fn(c)
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
}
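
// Example usage (an illustrative sketch): process the rows [0, h) of an image
// across the available CPUs, receiving row indices from the channel.
//
//	parallel(0, h, func(ys <-chan int) {
//		for y := range ys {
//			// process row y
//		}
//	})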
|
||||
|
||||
// absint returns the absolute value of i.
|
||||
func absint(i int) int {
|
||||
if i < 0 {
|
||||
return -i
|
||||
}
|
||||
return i
|
||||
}
|
||||
|
||||
// clamp rounds and clamps float64 value to fit into uint8.
|
||||
func clamp(x float64) uint8 {
|
||||
v := int64(x + 0.5)
|
||||
if v > 255 {
|
||||
return 255
|
||||
}
|
||||
if v > 0 {
|
||||
return uint8(v)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
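// reverse reverses a row of pixels in place, swapping 4-byte (RGBA) groups
// from the two ends toward the middle.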
func reverse(pix []uint8) {
|
||||
if len(pix) <= 4 {
|
||||
return
|
||||
}
|
||||
i := 0
|
||||
j := len(pix) - 4
|
||||
for i < j {
|
||||
pi := pix[i : i+4 : i+4]
|
||||
pj := pix[j : j+4 : j+4]
|
||||
pi[0], pj[0] = pj[0], pi[0]
|
||||
pi[1], pj[1] = pj[1], pi[1]
|
||||
pi[2], pj[2] = pj[2], pi[2]
|
||||
pi[3], pj[3] = pj[3], pi[3]
|
||||
i += 4
|
||||
j -= 4
|
||||
}
|
||||
}
|
||||
|
||||
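// toNRGBA converts any image to *image.NRGBA with its bounds translated to
// start at (0, 0); NRGBA input shares the original pixel data, anything else
// is cloned.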
func toNRGBA(img image.Image) *image.NRGBA {
|
||||
if img, ok := img.(*image.NRGBA); ok {
|
||||
return &image.NRGBA{
|
||||
Pix: img.Pix,
|
||||
Stride: img.Stride,
|
||||
Rect: img.Rect.Sub(img.Rect.Min),
|
||||
}
|
||||
}
|
||||
return Clone(img)
|
||||
}
|
||||
|
||||
// rgbToHSL converts a color from RGB to HSL.
|
||||
func rgbToHSL(r, g, b uint8) (float64, float64, float64) {
|
||||
rr := float64(r) / 255
|
||||
gg := float64(g) / 255
|
||||
bb := float64(b) / 255
|
||||
|
||||
max := math.Max(rr, math.Max(gg, bb))
|
||||
min := math.Min(rr, math.Min(gg, bb))
|
||||
|
||||
l := (max + min) / 2
|
||||
|
||||
if max == min {
|
||||
return 0, 0, l
|
||||
}
|
||||
|
||||
var h, s float64
|
||||
d := max - min
|
||||
if l > 0.5 {
|
||||
s = d / (2 - max - min)
|
||||
} else {
|
||||
s = d / (max + min)
|
||||
}
|
||||
|
||||
switch max {
|
||||
case rr:
|
||||
h = (gg - bb) / d
|
||||
if g < b {
|
||||
h += 6
|
||||
}
|
||||
case gg:
|
||||
h = (bb-rr)/d + 2
|
||||
case bb:
|
||||
h = (rr-gg)/d + 4
|
||||
}
|
||||
h /= 6
|
||||
|
||||
return h, s, l
|
||||
}
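
// For example, pure red maps to hue 0, full saturation and mid lightness:
// rgbToHSL(255, 0, 0) yields (0, 1, 0.5), and hslToRGB(0, 1, 0.5) converts it
// back to (255, 0, 0).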
|
||||
|
||||
// hslToRGB converts a color from HSL to RGB.
|
||||
func hslToRGB(h, s, l float64) (uint8, uint8, uint8) {
|
||||
var r, g, b float64
|
||||
if s == 0 {
|
||||
v := clamp(l * 255)
|
||||
return v, v, v
|
||||
}
|
||||
|
||||
var q float64
|
||||
if l < 0.5 {
|
||||
q = l * (1 + s)
|
||||
} else {
|
||||
q = l + s - l*s
|
||||
}
|
||||
p := 2*l - q
|
||||
|
||||
r = hueToRGB(p, q, h+1/3.0)
|
||||
g = hueToRGB(p, q, h)
|
||||
b = hueToRGB(p, q, h-1/3.0)
|
||||
|
||||
return clamp(r * 255), clamp(g * 255), clamp(b * 255)
|
||||
}
|
||||
|
||||
func hueToRGB(p, q, t float64) float64 {
|
||||
if t < 0 {
|
||||
t++
|
||||
}
|
||||
if t > 1 {
|
||||
t--
|
||||
}
|
||||
if t < 1/6.0 {
|
||||
return p + (q-p)*6*t
|
||||
}
|
||||
if t < 1/2.0 {
|
||||
return q
|
||||
}
|
||||
if t < 2/3.0 {
|
||||
return p + (q-p)*(2/3.0-t)*6
|
||||
}
|
||||
return p
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
script() {
|
||||
if [ "${TRAVIS_PULL_REQUEST}" == "false" ];
|
||||
then
|
||||
COVERALLS_PARALLEL=true
|
||||
|
||||
if [ ! -z "$JS" ];
|
||||
then
|
||||
bash js.cover.sh
|
||||
else
|
||||
go test -covermode=count -coverprofile=profile.cov
|
||||
fi
|
||||
|
||||
go get github.com/axw/gocov/gocov github.com/mattn/goveralls golang.org/x/tools/cmd/cover
|
||||
$HOME/gopath/bin/goveralls --coverprofile=profile.cov -service=travis-ci
|
||||
fi
|
||||
|
||||
if [ -z "$JS" ];
|
||||
then
|
||||
go get golang.org/x/lint/golint && golint ./...
|
||||
go vet
|
||||
go test -bench=.* -v ./...
|
||||
fi
|
||||
}
|
||||
|
||||
"$@"
|
|
@ -0,0 +1,19 @@
|
|||
language: go
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
go: 1.11
|
||||
- os: linux
|
||||
go: 1.11
|
||||
env:
|
||||
- JS=1
|
||||
- os: osx
|
||||
go: 1.11
|
||||
- os: windows
|
||||
go: 1.11
|
||||
script: bash .travis.sh script
|
||||
notifications:
|
||||
webhooks: https://coveralls.io/webhook
|
||||
email:
|
||||
on_success: never
|
||||
on_failure: change
|
|
@ -0,0 +1,22 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Dustin H
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
times
|
||||
==========
|
||||
|
||||
[![GoDoc](https://godoc.org/github.com/djherbis/times?status.svg)](https://godoc.org/github.com/djherbis/times)
|
||||
[![Release](https://img.shields.io/github/release/djherbis/times.svg)](https://github.com/djherbis/times/releases/latest)
|
||||
[![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE.txt)
|
||||
[![Build Status](https://travis-ci.org/djherbis/times.svg?branch=master)](https://travis-ci.org/djherbis/times)
|
||||
[![Coverage Status](https://coveralls.io/repos/djherbis/times/badge.svg?branch=master)](https://coveralls.io/r/djherbis/times?branch=master)
|
||||
[![Go Report Card](https://goreportcard.com/badge/github.com/djherbis/times)](https://goreportcard.com/report/github.com/djherbis/times)
|
||||
[![Sourcegraph](https://sourcegraph.com/github.com/djherbis/times/-/badge.svg)](https://sourcegraph.com/github.com/djherbis/times?badge)
|
||||
|
||||
Usage
|
||||
------------
|
||||
File Times for #golang
|
||||
|
||||
Go has hidden time functions for most platforms; this repo makes them accessible.
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"gopkg.in/djherbis/times.v1"
|
||||
)
|
||||
|
||||
func main() {
|
||||
t, err := times.Stat("myfile")
|
||||
if err != nil {
|
||||
log.Fatal(err.Error())
|
||||
}
|
||||
|
||||
log.Println(t.AccessTime())
|
||||
log.Println(t.ModTime())
|
||||
|
||||
if t.HasChangeTime() {
|
||||
log.Println(t.ChangeTime())
|
||||
}
|
||||
|
||||
if t.HasBirthTime() {
|
||||
log.Println(t.BirthTime())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Supported Times
|
||||
------------
|
||||
| | windows | linux | solaris | dragonfly | nacl | freebsd | darwin | netbsd | openbsd | plan9 | js |
|
||||
|:-----:|:-------:|:-----:|:-------:|:---------:|:------:|:-------:|:----:|:------:|:-------:|:-----:|:-----:|
|
||||
| atime | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| mtime | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| ctime | ✓* | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | ✓ |
|
||||
| btime | ✓ | | | | | ✓ | ✓ | ✓ | | | |
|
||||
|
||||
* Windows XP does not have ChangeTime, so HasChangeTime = false;
|
||||
however Vista onward does have ChangeTime, so Timespec.HasChangeTime() will
|
||||
only return false on those platforms when the syscall used to obtain it fails.
|
||||
* Also note that Get(FileInfo) will now only return values available in FileInfo.Sys(); this means Stat() is required to get ChangeTime on Windows (see the sketch below).
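
A minimal sketch of the FileInfo-based call described in the note above (the file name and error handling are illustrative):

```go
package main

import (
	"log"
	"os"

	"gopkg.in/djherbis/times.v1"
)

func main() {
	fi, err := os.Stat("myfile")
	if err != nil {
		log.Fatal(err.Error())
	}

	// Get only sees what FileInfo.Sys() exposes, so on Windows
	// HasChangeTime() reports false here; use Stat/StatFile instead.
	t := times.Get(fi)
	log.Println(t.AccessTime())
	log.Println(t.ModTime())

	if t.HasChangeTime() {
		log.Println(t.ChangeTime())
	}
}
```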
|
||||
|
||||
Installation
|
||||
------------
|
||||
```sh
|
||||
go get gopkg.in/djherbis/times.v1
|
||||
```
|
|
@ -0,0 +1,149 @@
|
|||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Stat returns the Timespec for the given filename.
|
||||
func Stat(name string) (Timespec, error) {
|
||||
ts, err := platformSpecficStat(name)
|
||||
if err == nil {
|
||||
return ts, err
|
||||
}
|
||||
|
||||
return stat(name, os.Stat)
|
||||
}
|
||||
|
||||
// Lstat returns the Timespec for the given filename, and does not follow Symlinks.
|
||||
func Lstat(name string) (Timespec, error) {
|
||||
ts, err := platformSpecficLstat(name)
|
||||
if err == nil {
|
||||
return ts, err
|
||||
}
|
||||
|
||||
return stat(name, os.Lstat)
|
||||
}
|
||||
|
||||
type timespecEx struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
btime
|
||||
}
|
||||
|
||||
// StatFile finds a Windows Timespec with ChangeTime.
|
||||
func StatFile(file *os.File) (Timespec, error) {
|
||||
return statFile(syscall.Handle(file.Fd()))
|
||||
}
|
||||
|
||||
func statFile(h syscall.Handle) (Timespec, error) {
|
||||
var fileInfo fileBasicInfo
|
||||
if err := getFileInformationByHandleEx(h, &fileInfo); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var t timespecEx
|
||||
t.atime.v = time.Unix(0, fileInfo.LastAccessTime.Nanoseconds())
|
||||
t.mtime.v = time.Unix(0, fileInfo.LastWriteTime.Nanoseconds())
|
||||
t.ctime.v = time.Unix(0, fileInfo.ChangeTime.Nanoseconds())
|
||||
t.btime.v = time.Unix(0, fileInfo.CreationTime.Nanoseconds())
|
||||
return t, nil
|
||||
}
|
||||
|
||||
func platformSpecficLstat(name string) (Timespec, error) {
|
||||
if findProcErr != nil {
|
||||
return nil, findProcErr
|
||||
}
|
||||
|
||||
isSym, err := isSymlink(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var attrs = uint32(syscall.FILE_FLAG_BACKUP_SEMANTICS)
|
||||
if isSym {
|
||||
attrs |= syscall.FILE_FLAG_OPEN_REPARSE_POINT
|
||||
}
|
||||
|
||||
return openHandleAndStat(name, attrs)
|
||||
}
|
||||
|
||||
func isSymlink(name string) (bool, error) {
|
||||
fi, err := os.Lstat(name)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return fi.Mode()&os.ModeSymlink != 0, nil
|
||||
}
|
||||
|
||||
func platformSpecficStat(name string) (Timespec, error) {
|
||||
if findProcErr != nil {
|
||||
return nil, findProcErr
|
||||
}
|
||||
|
||||
return openHandleAndStat(name, syscall.FILE_FLAG_BACKUP_SEMANTICS)
|
||||
}
|
||||
|
||||
func openHandleAndStat(name string, attrs uint32) (Timespec, error) {
|
||||
pathp, e := syscall.UTF16PtrFromString(name)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
h, e := syscall.CreateFile(pathp,
|
||||
syscall.FILE_WRITE_ATTRIBUTES, syscall.FILE_SHARE_WRITE, nil,
|
||||
syscall.OPEN_EXISTING, attrs, 0)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer syscall.Close(h)
|
||||
|
||||
return statFile(h)
|
||||
}
|
||||
|
||||
var (
|
||||
findProcErr error
|
||||
procGetFileInformationByHandleEx *syscall.Proc
|
||||
)
|
||||
|
||||
func init() {
|
||||
var modkernel32 *syscall.DLL
|
||||
if modkernel32, findProcErr = syscall.LoadDLL("kernel32.dll"); findProcErr == nil {
|
||||
procGetFileInformationByHandleEx, findProcErr = modkernel32.FindProc("GetFileInformationByHandleEx")
|
||||
}
|
||||
}
|
||||
|
||||
// fileBasicInfo mirrors the Win32 FILE_BASIC_INFO structure that carries the file times.
|
||||
//
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/aa364217(v=vs.85).aspx
|
||||
type fileBasicInfo struct {
|
||||
CreationTime syscall.Filetime
|
||||
LastAccessTime syscall.Filetime
|
||||
LastWriteTime syscall.Filetime
|
||||
ChangeTime syscall.Filetime
|
||||
FileAttributes uint32
|
||||
_ uint32 // padding
|
||||
}
|
||||
|
||||
type fileInformationClass int
|
||||
|
||||
const (
|
||||
fileBasicInfoClass fileInformationClass = iota
|
||||
)
|
||||
|
||||
func getFileInformationByHandleEx(handle syscall.Handle, data *fileBasicInfo) (err error) {
|
||||
if findProcErr != nil {
|
||||
return findProcErr
|
||||
}
|
||||
|
||||
r1, _, e1 := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(), 4, uintptr(handle), uintptr(fileBasicInfoClass), uintptr(unsafe.Pointer(data)), unsafe.Sizeof(*data), 0, 0)
|
||||
if r1 == 0 {
|
||||
err = syscall.EINVAL
|
||||
if e1 != 0 {
|
||||
err = error(e1)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
FROM golang:1.11
|
||||
|
||||
RUN curl -sL https://deb.nodesource.com/setup_8.x | bash
|
||||
RUN apt-get install --yes nodejs
|
||||
|
||||
WORKDIR /go/src/github.com/djherbis/times
|
||||
COPY . .
|
||||
|
||||
RUN GOOS=js GOARCH=wasm go test -covermode=count -coverprofile=profile.cov -exec="$(go env GOROOT)/misc/wasm/go_js_wasm_exec"
|
|
@ -0,0 +1,7 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
docker build -f js.cover.dockerfile -t js.cover.djherbis.times .
|
||||
docker create --name js.cover.djherbis.times js.cover.djherbis.times
|
||||
docker cp js.cover.djherbis.times:/go/src/github.com/djherbis/times/profile.cov .
|
||||
docker rm -v js.cover.djherbis.times
|
|
@ -0,0 +1,74 @@
|
|||
// Package times provides a platform-independent way to get atime, mtime, ctime and btime for files.
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Get returns the Timespec for the given FileInfo
|
||||
func Get(fi os.FileInfo) Timespec {
|
||||
return getTimespec(fi)
|
||||
}
|
||||
|
||||
type statFunc func(string) (os.FileInfo, error)
|
||||
|
||||
func stat(name string, sf statFunc) (Timespec, error) {
|
||||
fi, err := sf(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return getTimespec(fi), nil
|
||||
}
|
||||
|
||||
// Timespec provides access to file times.
|
||||
// ChangeTime() panics unless HasChangeTime() is true and
|
||||
// BirthTime() panics unless HasBirthTime() is true.
|
||||
type Timespec interface {
|
||||
ModTime() time.Time
|
||||
AccessTime() time.Time
|
||||
ChangeTime() time.Time
|
||||
BirthTime() time.Time
|
||||
HasChangeTime() bool
|
||||
HasBirthTime() bool
|
||||
}
|
||||
|
||||
type atime struct {
|
||||
v time.Time
|
||||
}
|
||||
|
||||
func (a atime) AccessTime() time.Time { return a.v }
|
||||
|
||||
type ctime struct {
|
||||
v time.Time
|
||||
}
|
||||
|
||||
func (ctime) HasChangeTime() bool { return true }
|
||||
|
||||
func (c ctime) ChangeTime() time.Time { return c.v }
|
||||
|
||||
type mtime struct {
|
||||
v time.Time
|
||||
}
|
||||
|
||||
func (m mtime) ModTime() time.Time { return m.v }
|
||||
|
||||
type btime struct {
|
||||
v time.Time
|
||||
}
|
||||
|
||||
func (btime) HasBirthTime() bool { return true }
|
||||
|
||||
func (b btime) BirthTime() time.Time { return b.v }
|
||||
|
||||
type noctime struct{}
|
||||
|
||||
func (noctime) HasChangeTime() bool { return false }
|
||||
|
||||
func (noctime) ChangeTime() time.Time { panic("ctime not available") }
|
||||
|
||||
type nobtime struct{}
|
||||
|
||||
func (nobtime) HasBirthTime() bool { return false }
|
||||
|
||||
func (nobtime) BirthTime() time.Time { panic("birthtime not available") }
|
|
@ -0,0 +1,40 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_darwin.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = true
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
btime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atimespec)
|
||||
t.mtime.v = timespecToTime(stat.Mtimespec)
|
||||
t.ctime.v = timespecToTime(stat.Ctimespec)
|
||||
t.btime.v = timespecToTime(stat.Birthtimespec)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_dragonfly.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atim)
|
||||
t.mtime.v = timespecToTime(stat.Mtim)
|
||||
t.ctime.v = timespecToTime(stat.Ctim)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_freebsd.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = true
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
btime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atimespec)
|
||||
t.mtime.v = timespecToTime(stat.Mtimespec)
|
||||
t.ctime.v = timespecToTime(stat.Ctimespec)
|
||||
t.btime.v = timespecToTime(stat.Birthtimespec)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// https://golang.org/src/os/stat_nacljs.go
|
||||
|
||||
// +build js,wasm
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func timespecToTime(sec, nsec int64) time.Time {
|
||||
return time.Unix(sec, nsec)
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atime, stat.AtimeNsec)
|
||||
t.mtime.v = timespecToTime(stat.Mtime, stat.MtimeNsec)
|
||||
t.ctime.v = timespecToTime(stat.Ctime, stat.CtimeNsec)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_linux.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atim)
|
||||
t.mtime.v = timespecToTime(stat.Mtim)
|
||||
t.ctime.v = timespecToTime(stat.Ctim)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// https://golang.org/src/os/stat_nacljs.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func timespecToTime(sec, nsec int64) time.Time {
|
||||
return time.Unix(sec, nsec)
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atime, stat.AtimeNsec)
|
||||
t.mtime.v = timespecToTime(stat.Mtime, stat.MtimeNsec)
|
||||
t.ctime.v = timespecToTime(stat.Ctime, stat.CtimeNsec)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_netbsd.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = true
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
btime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atimespec)
|
||||
t.mtime.v = timespecToTime(stat.Mtimespec)
|
||||
t.ctime.v = timespecToTime(stat.Ctimespec)
|
||||
t.btime.v = timespecToTime(stat.Birthtimespec)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_openbsd.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atim)
|
||||
t.mtime.v = timespecToTime(stat.Mtim)
|
||||
t.ctime.v = timespecToTime(stat.Ctim)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,34 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_plan9.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = false
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
noctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Dir)
|
||||
t.atime.v = time.Unix(int64(stat.Atime), 0)
|
||||
t.mtime.v = time.Unix(int64(stat.Mtime), 0)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_solaris.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = true
|
||||
HasBirthTime = false
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
ctime
|
||||
nobtime
|
||||
}
|
||||
|
||||
func timespecToTime(ts syscall.Timespec) time.Time {
|
||||
return time.Unix(int64(ts.Sec), int64(ts.Nsec))
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) (t timespec) {
|
||||
stat := fi.Sys().(*syscall.Stat_t)
|
||||
t.atime.v = timespecToTime(stat.Atim)
|
||||
t.mtime.v = timespecToTime(stat.Mtim)
|
||||
t.ctime.v = timespecToTime(stat.Ctim)
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,36 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// http://golang.org/src/os/stat_windows.go
|
||||
|
||||
package times
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HasChangeTime and HasBirthTime are true if and only if
|
||||
// the target OS supports them.
|
||||
const (
|
||||
HasChangeTime = false
|
||||
HasBirthTime = true
|
||||
)
|
||||
|
||||
type timespec struct {
|
||||
atime
|
||||
mtime
|
||||
noctime
|
||||
btime
|
||||
}
|
||||
|
||||
func getTimespec(fi os.FileInfo) Timespec {
|
||||
var t timespec
|
||||
stat := fi.Sys().(*syscall.Win32FileAttributeData)
|
||||
t.atime.v = time.Unix(0, stat.LastAccessTime.Nanoseconds())
|
||||
t.mtime.v = time.Unix(0, stat.LastWriteTime.Nanoseconds())
|
||||
t.btime.v = time.Unix(0, stat.CreationTime.Nanoseconds())
|
||||
return t
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
// +build !windows
|
||||
|
||||
package times
|
||||
|
||||
import "os"
|
||||
|
||||
// Stat returns the Timespec for the given filename.
|
||||
func Stat(name string) (Timespec, error) {
|
||||
return stat(name, os.Stat)
|
||||
}
|
||||
|
||||
// Lstat returns the Timespec for the given filename, and does not follow Symlinks.
|
||||
func Lstat(name string) (Timespec, error) {
|
||||
return stat(name, os.Lstat)
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
MIT LICENSE
|
||||
|
||||
Copyright 2019 Dustin Oprea
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,659 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
var (
|
||||
ifdLogger = log.NewLogger("exifcommon.ifd")
|
||||
)
|
||||
|
||||
var (
|
||||
ErrChildIfdNotMapped = errors.New("no child-IFD for that tag-ID under parent")
|
||||
)
|
||||
|
||||
// MappedIfd is one node in the IFD-mapping.
|
||||
type MappedIfd struct {
|
||||
ParentTagId uint16
|
||||
Placement []uint16
|
||||
Path []string
|
||||
|
||||
Name string
|
||||
TagId uint16
|
||||
Children map[uint16]*MappedIfd
|
||||
}
|
||||
|
||||
// String returns a descriptive string.
|
||||
func (mi *MappedIfd) String() string {
|
||||
pathPhrase := mi.PathPhrase()
|
||||
return fmt.Sprintf("MappedIfd<(0x%04X) [%s] PATH=[%s]>", mi.TagId, mi.Name, pathPhrase)
|
||||
}
|
||||
|
||||
// PathPhrase returns a non-fully-qualified IFD path.
|
||||
func (mi *MappedIfd) PathPhrase() string {
|
||||
return strings.Join(mi.Path, "/")
|
||||
}
|
||||
|
||||
// TODO(dustin): Refactor this to use IfdIdentity structs.
|
||||
|
||||
// IfdMapping describes all of the IFDs that we currently recognize.
|
||||
type IfdMapping struct {
|
||||
rootNode *MappedIfd
|
||||
}
|
||||
|
||||
// NewIfdMapping returns a new IfdMapping struct.
|
||||
func NewIfdMapping() (ifdMapping *IfdMapping) {
|
||||
rootNode := &MappedIfd{
|
||||
Path: make([]string, 0),
|
||||
Children: make(map[uint16]*MappedIfd),
|
||||
}
|
||||
|
||||
return &IfdMapping{
|
||||
rootNode: rootNode,
|
||||
}
|
||||
}
|
||||
|
||||
// NewIfdMappingWithStandard returns a new IfdMapping struct preloaded with the
|
||||
// standard IFDs.
|
||||
func NewIfdMappingWithStandard() (ifdMapping *IfdMapping) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err := log.Wrap(state.(error))
|
||||
log.Panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
im := NewIfdMapping()
|
||||
|
||||
err := LoadStandardIfds(im)
|
||||
log.PanicIf(err)
|
||||
|
||||
return im
|
||||
}
|
||||
|
||||
// Get returns the node given the path slice.
|
||||
func (im *IfdMapping) Get(parentPlacement []uint16) (childIfd *MappedIfd, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ptr := im.rootNode
|
||||
for _, tagId := range parentPlacement {
|
||||
if descendantPtr, found := ptr.Children[tagId]; found == false {
|
||||
log.Panicf("ifd child with tag-ID (%04x) not registered: [%s]", tagId, ptr.PathPhrase())
|
||||
} else {
|
||||
ptr = descendantPtr
|
||||
}
|
||||
}
|
||||
|
||||
return ptr, nil
|
||||
}
|
||||
|
||||
// GetWithPath returns the node given the path string.
|
||||
func (im *IfdMapping) GetWithPath(pathPhrase string) (mi *MappedIfd, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if pathPhrase == "" {
|
||||
log.Panicf("path-phrase is empty")
|
||||
}
|
||||
|
||||
path := strings.Split(pathPhrase, "/")
|
||||
ptr := im.rootNode
|
||||
|
||||
for _, name := range path {
|
||||
var hit *MappedIfd
|
||||
for _, mi := range ptr.Children {
|
||||
if mi.Name == name {
|
||||
hit = mi
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if hit == nil {
|
||||
log.Panicf("ifd child with name [%s] not registered: [%s]", name, ptr.PathPhrase())
|
||||
}
|
||||
|
||||
ptr = hit
|
||||
}
|
||||
|
||||
return ptr, nil
|
||||
}
|
||||
|
||||
// GetChild is a convenience function to get the child path for a given parent
|
||||
// placement and child tag-ID.
|
||||
func (im *IfdMapping) GetChild(parentPathPhrase string, tagId uint16) (mi *MappedIfd, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
mi, err = im.GetWithPath(parentPathPhrase)
|
||||
log.PanicIf(err)
|
||||
|
||||
for _, childMi := range mi.Children {
|
||||
if childMi.TagId == tagId {
|
||||
return childMi, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Even if such an IFD exists in the data, it has not been registered here
|
||||
// and would therefore be unknown to us.
|
||||
log.Panic(ErrChildIfdNotMapped)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// IfdTagIdAndIndex represents a specific part of the IFD path.
|
||||
//
|
||||
// This is a legacy type.
|
||||
type IfdTagIdAndIndex struct {
|
||||
Name string
|
||||
TagId uint16
|
||||
Index int
|
||||
}
|
||||
|
||||
// String returns a descriptive string.
|
||||
func (itii IfdTagIdAndIndex) String() string {
|
||||
return fmt.Sprintf("IfdTagIdAndIndex<NAME=[%s] ID=(%04x) INDEX=(%d)>", itii.Name, itii.TagId, itii.Index)
|
||||
}
|
||||
|
||||
// ResolvePath takes a list of names, which can also be suffixed with indices
|
||||
// (to identify the second, third, etc.. sibling IFD) and returns a list of
|
||||
// tag-IDs and those indices.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// - IFD/Exif/Iop
|
||||
// - IFD0/Exif/Iop
|
||||
//
|
||||
// This is the only call that supports adding the numeric indices.
|
||||
func (im *IfdMapping) ResolvePath(pathPhrase string) (lineage []IfdTagIdAndIndex, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
pathPhrase = strings.TrimSpace(pathPhrase)
|
||||
|
||||
if pathPhrase == "" {
|
||||
log.Panicf("can not resolve empty path-phrase")
|
||||
}
|
||||
|
||||
path := strings.Split(pathPhrase, "/")
|
||||
lineage = make([]IfdTagIdAndIndex, len(path))
|
||||
|
||||
ptr := im.rootNode
|
||||
empty := IfdTagIdAndIndex{}
|
||||
for i, name := range path {
|
||||
indexByte := name[len(name)-1]
|
||||
index := 0
|
||||
if indexByte >= '0' && indexByte <= '9' {
|
||||
index = int(indexByte - '0')
|
||||
name = name[:len(name)-1]
|
||||
}
|
||||
|
||||
itii := IfdTagIdAndIndex{}
|
||||
for _, mi := range ptr.Children {
|
||||
if mi.Name != name {
|
||||
continue
|
||||
}
|
||||
|
||||
itii.Name = name
|
||||
itii.TagId = mi.TagId
|
||||
itii.Index = index
|
||||
|
||||
ptr = mi
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
if itii == empty {
|
||||
log.Panicf("ifd child with name [%s] not registered: [%s]", name, pathPhrase)
|
||||
}
|
||||
|
||||
lineage[i] = itii
|
||||
}
|
||||
|
||||
return lineage, nil
|
||||
}
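
// Example usage (an illustrative sketch, assuming the standard IFDs have been
// loaded):
//
//	im := NewIfdMappingWithStandard()
//	lineage, err := im.ResolvePath("IFD0/Exif")
//	// lineage[0] is {Name: "IFD", TagId: 0x0000, Index: 0} and
//	// lineage[1] is {Name: "Exif", TagId: 0x8769, Index: 0}.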
|
||||
|
||||
// FqPathPhraseFromLineage returns the fully-qualified IFD path from the slice.
|
||||
func (im *IfdMapping) FqPathPhraseFromLineage(lineage []IfdTagIdAndIndex) (fqPathPhrase string) {
|
||||
fqPathParts := make([]string, len(lineage))
|
||||
for i, itii := range lineage {
|
||||
if itii.Index > 0 {
|
||||
fqPathParts[i] = fmt.Sprintf("%s%d", itii.Name, itii.Index)
|
||||
} else {
|
||||
fqPathParts[i] = itii.Name
|
||||
}
|
||||
}
|
||||
|
||||
return strings.Join(fqPathParts, "/")
|
||||
}
|
||||
|
||||
// PathPhraseFromLineage returns the non-fully-qualified IFD path from the
|
||||
// slice.
|
||||
func (im *IfdMapping) PathPhraseFromLineage(lineage []IfdTagIdAndIndex) (pathPhrase string) {
|
||||
pathParts := make([]string, len(lineage))
|
||||
for i, itii := range lineage {
|
||||
pathParts[i] = itii.Name
|
||||
}
|
||||
|
||||
return strings.Join(pathParts, "/")
|
||||
}
|
||||
|
||||
// StripPathPhraseIndices returns a non-fully-qualified path-phrase (no
|
||||
// indices).
|
||||
func (im *IfdMapping) StripPathPhraseIndices(pathPhrase string) (strippedPathPhrase string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
lineage, err := im.ResolvePath(pathPhrase)
|
||||
log.PanicIf(err)
|
||||
|
||||
strippedPathPhrase = im.PathPhraseFromLineage(lineage)
|
||||
return strippedPathPhrase, nil
|
||||
}
|
||||
|
||||
// Add puts the given IFD at the given position of the tree. The position of the
|
||||
// tree is referred to as the placement and is represented by a set of tag-IDs,
|
||||
// where the leftmost is the root tag and the tags going to the right are
|
||||
// progressive descendants.
|
||||
func (im *IfdMapping) Add(parentPlacement []uint16, tagId uint16, name string) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): !! It would be nicer to provide a list of names in the placement rather than tag-IDs.
|
||||
|
||||
ptr, err := im.Get(parentPlacement)
|
||||
log.PanicIf(err)
|
||||
|
||||
path := make([]string, len(parentPlacement)+1)
|
||||
if len(parentPlacement) > 0 {
|
||||
copy(path, ptr.Path)
|
||||
}
|
||||
|
||||
path[len(path)-1] = name
|
||||
|
||||
placement := make([]uint16, len(parentPlacement)+1)
|
||||
if len(placement) > 0 {
|
||||
copy(placement, ptr.Placement)
|
||||
}
|
||||
|
||||
placement[len(placement)-1] = tagId
|
||||
|
||||
childIfd := &MappedIfd{
|
||||
ParentTagId: ptr.TagId,
|
||||
Path: path,
|
||||
Placement: placement,
|
||||
Name: name,
|
||||
TagId: tagId,
|
||||
Children: make(map[uint16]*MappedIfd),
|
||||
}
|
||||
|
||||
if _, found := ptr.Children[tagId]; found == true {
|
||||
log.Panicf("child IFD with tag-ID (%04x) already registered under IFD [%s] with tag-ID (%04x)", tagId, ptr.Name, ptr.TagId)
|
||||
}
|
||||
|
||||
ptr.Children[tagId] = childIfd
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (im *IfdMapping) dumpLineages(stack []*MappedIfd, input []string) (output []string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
currentIfd := stack[len(stack)-1]
|
||||
|
||||
output = input
|
||||
for _, childIfd := range currentIfd.Children {
|
||||
stackCopy := make([]*MappedIfd, len(stack)+1)
|
||||
|
||||
copy(stackCopy, stack)
|
||||
stackCopy[len(stack)] = childIfd
|
||||
|
||||
// Add to output, but don't include the obligatory root node.
|
||||
parts := make([]string, len(stackCopy)-1)
|
||||
for i, mi := range stackCopy[1:] {
|
||||
parts[i] = mi.Name
|
||||
}
|
||||
|
||||
output = append(output, strings.Join(parts, "/"))
|
||||
|
||||
output, err = im.dumpLineages(stackCopy, output)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
return output, nil
|
||||
}
|
||||
|
||||
// DumpLineages returns a slice of strings representing all mappings.
|
||||
func (im *IfdMapping) DumpLineages() (output []string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
stack := []*MappedIfd{im.rootNode}
|
||||
output = make([]string, 0)
|
||||
|
||||
output, err = im.dumpLineages(stack, output)
|
||||
log.PanicIf(err)
|
||||
|
||||
return output, nil
|
||||
}
|
||||
|
||||
// LoadStandardIfds loads the standard IFDs into the mapping.
|
||||
func LoadStandardIfds(im *IfdMapping) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
err = im.Add(
|
||||
[]uint16{},
|
||||
IfdStandardIfdIdentity.TagId(), IfdStandardIfdIdentity.Name())
|
||||
|
||||
log.PanicIf(err)
|
||||
|
||||
err = im.Add(
|
||||
[]uint16{IfdStandardIfdIdentity.TagId()},
|
||||
IfdExifStandardIfdIdentity.TagId(), IfdExifStandardIfdIdentity.Name())
|
||||
|
||||
log.PanicIf(err)
|
||||
|
||||
err = im.Add(
|
||||
[]uint16{IfdStandardIfdIdentity.TagId(), IfdExifStandardIfdIdentity.TagId()},
|
||||
IfdExifIopStandardIfdIdentity.TagId(), IfdExifIopStandardIfdIdentity.Name())
|
||||
|
||||
log.PanicIf(err)
|
||||
|
||||
err = im.Add(
|
||||
[]uint16{IfdStandardIfdIdentity.TagId()},
|
||||
IfdGpsInfoStandardIfdIdentity.TagId(), IfdGpsInfoStandardIfdIdentity.Name())
|
||||
|
||||
log.PanicIf(err)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// IfdTag describes a single IFD tag and its parent (if any).
|
||||
type IfdTag struct {
|
||||
parentIfdTag *IfdTag
|
||||
tagId uint16
|
||||
name string
|
||||
}
|
||||
|
||||
func NewIfdTag(parentIfdTag *IfdTag, tagId uint16, name string) IfdTag {
|
||||
return IfdTag{
|
||||
parentIfdTag: parentIfdTag,
|
||||
tagId: tagId,
|
||||
name: name,
|
||||
}
|
||||
}
|
||||
|
||||
// ParentIfd returns the IfdTag of this IFD's parent.
|
||||
func (it IfdTag) ParentIfd() *IfdTag {
|
||||
return it.parentIfdTag
|
||||
}
|
||||
|
||||
// TagId returns the tag-ID of this IFD.
|
||||
func (it IfdTag) TagId() uint16 {
|
||||
return it.tagId
|
||||
}
|
||||
|
||||
// Name returns the simple name of this IFD.
|
||||
func (it IfdTag) Name() string {
|
||||
return it.name
|
||||
}
|
||||
|
||||
// String returns a descriptive string.
|
||||
func (it IfdTag) String() string {
|
||||
parentIfdPhrase := ""
|
||||
if it.parentIfdTag != nil {
|
||||
parentIfdPhrase = fmt.Sprintf(" PARENT=(0x%04x)[%s]", it.parentIfdTag.tagId, it.parentIfdTag.name)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("IfdTag<TAG-ID=(0x%04x) NAME=[%s]%s>", it.tagId, it.name, parentIfdPhrase)
|
||||
}
|
||||
|
||||
var (
|
||||
// rootStandardIfd is the standard root IFD.
|
||||
rootStandardIfd = NewIfdTag(nil, 0x0000, "IFD") // IFD
|
||||
|
||||
// exifStandardIfd is the standard "Exif" IFD.
|
||||
exifStandardIfd = NewIfdTag(&rootStandardIfd, 0x8769, "Exif") // IFD/Exif
|
||||
|
||||
// iopStandardIfd is the standard "Iop" IFD.
|
||||
iopStandardIfd = NewIfdTag(&exifStandardIfd, 0xA005, "Iop") // IFD/Exif/Iop
|
||||
|
||||
// gpsInfoStandardIfd is the standard "GPS" IFD.
|
||||
gpsInfoStandardIfd = NewIfdTag(&rootStandardIfd, 0x8825, "GPSInfo") // IFD/GPSInfo
|
||||
)
|
||||
|
||||
// IfdIdentityPart represents one component in an IFD path.
|
||||
type IfdIdentityPart struct {
|
||||
Name string
|
||||
Index int
|
||||
}
|
||||
|
||||
// String returns a fully-qualified IFD path.
|
||||
func (iip IfdIdentityPart) String() string {
|
||||
if iip.Index > 0 {
|
||||
return fmt.Sprintf("%s%d", iip.Name, iip.Index)
|
||||
} else {
|
||||
return iip.Name
|
||||
}
|
||||
}
|
||||
|
||||
// UnindexedString returns a non-fully-qualified IFD path.
|
||||
func (iip IfdIdentityPart) UnindexedString() string {
|
||||
return iip.Name
|
||||
}
|
||||
|
||||
// IfdIdentity represents a single IFD path and provides access to various
|
||||
// information and representations.
|
||||
//
|
||||
// Only global instances can be used for equality checks.
|
||||
type IfdIdentity struct {
|
||||
ifdTag IfdTag
|
||||
parts []IfdIdentityPart
|
||||
ifdPath string
|
||||
fqIfdPath string
|
||||
}
|
||||
|
||||
// NewIfdIdentity returns a new IfdIdentity struct.
|
||||
func NewIfdIdentity(ifdTag IfdTag, parts ...IfdIdentityPart) (ii *IfdIdentity) {
|
||||
ii = &IfdIdentity{
|
||||
ifdTag: ifdTag,
|
||||
parts: parts,
|
||||
}
|
||||
|
||||
ii.ifdPath = ii.getIfdPath()
|
||||
ii.fqIfdPath = ii.getFqIfdPath()
|
||||
|
||||
return ii
|
||||
}
|
||||
|
||||
// NewIfdIdentityFromString parses a string like "IFD/Exif" or "IFD1" or
|
||||
// something more exotic with custom IFDs ("SomeIFD4/SomeChildIFD6"). Note that
|
||||
// this will validate the unindexed IFD structure (because the standard tags from
|
||||
// the specification are unindexed), but not, obviously, any indices (e.g.
|
||||
// the numbers in "IFD0", "IFD1", "SomeIFD4/SomeChildIFD6"). It is
|
||||
// required for the caller to check whether these specific instances
|
||||
// were actually parsed out of the stream.
|
||||
func NewIfdIdentityFromString(im *IfdMapping, fqIfdPath string) (ii *IfdIdentity, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
lineage, err := im.ResolvePath(fqIfdPath)
|
||||
log.PanicIf(err)
|
||||
|
||||
var lastIt *IfdTag
|
||||
identityParts := make([]IfdIdentityPart, len(lineage))
|
||||
for i, itii := range lineage {
|
||||
// Build out the tag that will eventually point to the IFD represented
|
||||
// by the right-most part in the IFD path.
|
||||
|
||||
it := &IfdTag{
|
||||
parentIfdTag: lastIt,
|
||||
tagId: itii.TagId,
|
||||
name: itii.Name,
|
||||
}
|
||||
|
||||
lastIt = it
|
||||
|
||||
// Create the next IfdIdentity part.
|
||||
|
||||
iip := IfdIdentityPart{
|
||||
Name: itii.Name,
|
||||
Index: itii.Index,
|
||||
}
|
||||
|
||||
identityParts[i] = iip
|
||||
}
|
||||
|
||||
ii = NewIfdIdentity(*lastIt, identityParts...)
|
||||
return ii, nil
|
||||
}
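
// Example usage (an illustrative sketch):
//
//	im := NewIfdMappingWithStandard()
//	ii, err := NewIfdIdentityFromString(im, "IFD/Exif")
//	// ii.String() == "IFD/Exif" and ii.TagId() == 0x8769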
|
||||
|
||||
func (ii *IfdIdentity) getFqIfdPath() string {
|
||||
partPhrases := make([]string, len(ii.parts))
|
||||
for i, iip := range ii.parts {
|
||||
partPhrases[i] = iip.String()
|
||||
}
|
||||
|
||||
return strings.Join(partPhrases, "/")
|
||||
}
|
||||
|
||||
func (ii *IfdIdentity) getIfdPath() string {
|
||||
partPhrases := make([]string, len(ii.parts))
|
||||
for i, iip := range ii.parts {
|
||||
partPhrases[i] = iip.UnindexedString()
|
||||
}
|
||||
|
||||
return strings.Join(partPhrases, "/")
|
||||
}
|
||||
|
||||
// String returns a fully-qualified IFD path.
|
||||
func (ii *IfdIdentity) String() string {
|
||||
return ii.fqIfdPath
|
||||
}
|
||||
|
||||
// UnindexedString returns a non-fully-qualified IFD path.
|
||||
func (ii *IfdIdentity) UnindexedString() string {
|
||||
return ii.ifdPath
|
||||
}
|
||||
|
||||
// IfdTag returns the tag struct behind this IFD.
|
||||
func (ii *IfdIdentity) IfdTag() IfdTag {
|
||||
return ii.ifdTag
|
||||
}
|
||||
|
||||
// TagId returns the tag-ID of the IFD.
|
||||
func (ii *IfdIdentity) TagId() uint16 {
|
||||
return ii.ifdTag.TagId()
|
||||
}
|
||||
|
||||
// LeafPathPart returns the last right-most path-part, which represents the
|
||||
// current IFD.
|
||||
func (ii *IfdIdentity) LeafPathPart() IfdIdentityPart {
|
||||
return ii.parts[len(ii.parts)-1]
|
||||
}
|
||||
|
||||
// Name returns the simple name of this IFD.
|
||||
func (ii *IfdIdentity) Name() string {
|
||||
return ii.LeafPathPart().Name
|
||||
}
|
||||
|
||||
// Index returns the index of this IFD (more than one IFD under a parent IFD
|
||||
// will be numbered [0..n]).
|
||||
func (ii *IfdIdentity) Index() int {
|
||||
return ii.LeafPathPart().Index
|
||||
}
|
||||
|
||||
// Equals returns true if the two IfdIdentity instances are effectively
|
||||
// identical.
|
||||
//
|
||||
// Since there's no way to get a specific fully-qualified IFD path without a
|
||||
// certain slice of parts and all other fields are also derived from this,
|
||||
// checking that the fully-qualified IFD paths are equal is sufficient.
|
||||
func (ii *IfdIdentity) Equals(ii2 *IfdIdentity) bool {
|
||||
return ii.String() == ii2.String()
|
||||
}
|
||||
|
||||
// NewChild creates an IfdIdentity for an IFD that is a child of the current
|
||||
// IFD.
|
||||
func (ii *IfdIdentity) NewChild(childIfdTag IfdTag, index int) (iiChild *IfdIdentity) {
|
||||
if *childIfdTag.parentIfdTag != ii.ifdTag {
|
||||
log.Panicf("can not add child; we are not the parent:\nUS=%v\nCHILD=%v", ii.ifdTag, childIfdTag)
|
||||
}
|
||||
|
||||
childPart := IfdIdentityPart{childIfdTag.name, index}
|
||||
childParts := append(ii.parts, childPart)
|
||||
|
||||
iiChild = NewIfdIdentity(childIfdTag, childParts...)
|
||||
return iiChild
|
||||
}
|
||||
|
||||
// NewSibling creates an IfdIdentity for an IFD that is a sibling to the current
|
||||
// one.
|
||||
func (ii *IfdIdentity) NewSibling(index int) (iiSibling *IfdIdentity) {
|
||||
parts := make([]IfdIdentityPart, len(ii.parts))
|
||||
|
||||
copy(parts, ii.parts)
|
||||
parts[len(parts)-1].Index = index
|
||||
|
||||
iiSibling = NewIfdIdentity(ii.ifdTag, parts...)
|
||||
return iiSibling
|
||||
}
|
||||
|
||||
var (
|
||||
// IfdStandardIfdIdentity represents the IFD path for IFD0.
|
||||
IfdStandardIfdIdentity = NewIfdIdentity(rootStandardIfd, IfdIdentityPart{"IFD", 0})
|
||||
|
||||
// IfdExifStandardIfdIdentity represents the IFD path for IFD0/Exif0.
|
||||
IfdExifStandardIfdIdentity = IfdStandardIfdIdentity.NewChild(exifStandardIfd, 0)
|
||||
|
||||
// IfdExifIopStandardIfdIdentity represents the IFD path for IFD0/Exif0/Iop0.
|
||||
IfdExifIopStandardIfdIdentity = IfdExifStandardIfdIdentity.NewChild(iopStandardIfd, 0)
|
||||
|
||||
// IfdGpsInfoStandardIfdIdentity represents the IFD path for IFD0/GPSInfo0.
|
||||
IfdGpsInfoStandardIfdIdentity = IfdStandardIfdIdentity.NewChild(gpsInfoStandardIfd, 0)
|
||||
|
||||
// Ifd1StandardIfdIdentity represents the IFD path for IFD1.
|
||||
Ifd1StandardIfdIdentity = NewIfdIdentity(rootStandardIfd, IfdIdentityPart{"IFD", 1})
|
||||
)
|
||||
|
||||
var (
|
||||
IfdPathStandard = IfdStandardIfdIdentity
|
||||
IfdPathStandardExif = IfdExifStandardIfdIdentity
|
||||
IfdPathStandardExifIop = IfdExifIopStandardIfdIdentity
|
||||
IfdPathStandardGps = IfdGpsInfoStandardIfdIdentity
|
||||
)
|
|
@ -0,0 +1,219 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
var (
|
||||
parserLogger = log.NewLogger("exifcommon.parser")
|
||||
)
|
||||
|
||||
// Parser knows how to parse all well-defined, encoded EXIF types.
|
||||
type Parser struct {
|
||||
}
|
||||
|
||||
// ParseBytes knows how to parse a byte-type value.
|
||||
func (p *Parser) ParseBytes(data []byte, unitCount uint32) (value []uint8, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeByte.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
value = []uint8(data[:count])
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ParseAscii returns a string and auto-strips the trailing NUL character that
|
||||
// should be at the end of the encoding.
|
||||
func (p *Parser) ParseAscii(data []byte, unitCount uint32) (value string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeAscii.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
if len(data) == 0 || data[count-1] != 0 {
|
||||
s := string(data[:count])
|
||||
parserLogger.Warningf(nil, "ascii not terminated with nul as expected: [%v]", s)
|
||||
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// Auto-strip the NUL from the end. It serves no purpose outside of
|
||||
// encoding semantics.
|
||||
|
||||
return string(data[:count-1]), nil
|
||||
}
|
||||
|
||||
// ParseAsciiNoNul returns a string without any consideration for a trailing NUL
|
||||
// character.
|
||||
func (p *Parser) ParseAsciiNoNul(data []byte, unitCount uint32) (value string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeAscii.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
return string(data[:count]), nil
|
||||
}
|
||||
|
||||
// ParseShorts knows how to parse an encoded list of shorts.
|
||||
func (p *Parser) ParseShorts(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []uint16, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeShort.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
value = make([]uint16, count)
|
||||
for i := 0; i < count; i++ {
|
||||
value[i] = byteOrder.Uint16(data[i*2:])
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ParseLongs knows how to parse an encoded list of unsigned longs.
|
||||
func (p *Parser) ParseLongs(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeLong.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
value = make([]uint32, count)
|
||||
for i := 0; i < count; i++ {
|
||||
value[i] = byteOrder.Uint32(data[i*4:])
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ParseRationals knows how to parse an encoded list of unsigned rationals.
|
||||
func (p *Parser) ParseRationals(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []Rational, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeRational.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
value = make([]Rational, count)
|
||||
for i := 0; i < count; i++ {
|
||||
value[i].Numerator = byteOrder.Uint32(data[i*8:])
|
||||
value[i].Denominator = byteOrder.Uint32(data[i*8+4:])
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ParseSignedLongs knows how to parse an encoded list of signed longs.
|
||||
func (p *Parser) ParseSignedLongs(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []int32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeSignedLong.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
b := bytes.NewBuffer(data)
|
||||
|
||||
value = make([]int32, count)
|
||||
for i := 0; i < count; i++ {
|
||||
err := binary.Read(b, byteOrder, &value[i])
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ParseSignedRationals knows how to parse an encoded list of signed
|
||||
// rationals.
|
||||
func (p *Parser) ParseSignedRationals(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []SignedRational, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
count := int(unitCount)
|
||||
|
||||
if len(data) < (TypeSignedRational.Size() * count) {
|
||||
log.Panic(ErrNotEnoughData)
|
||||
}
|
||||
|
||||
b := bytes.NewBuffer(data)
|
||||
|
||||
value = make([]SignedRational, count)
|
||||
for i := 0; i < count; i++ {
|
||||
err = binary.Read(b, byteOrder, &value[i].Numerator)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = binary.Read(b, byteOrder, &value[i].Denominator)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
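// An illustrative sketch of the Parser above, decoding two big-endian SHORT
// values; the raw bytes are made up for demonstration:

package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	p := new(exifcommon.Parser)

	// Two big-endian uint16 values: 0x0102 and 0x0304.
	raw := []byte{0x01, 0x02, 0x03, 0x04}

	values, err := p.ParseShorts(raw, 2, binary.BigEndian)
	if err != nil {
		panic(err)
	}

	fmt.Println(values) // [258 772]
}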
|
|
@@ -0,0 +1,88 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"encoding/binary"
|
||||
"io/ioutil"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
var (
|
||||
moduleRootPath = ""
|
||||
|
||||
testExifData []byte = nil
|
||||
|
||||
// EncodeDefaultByteOrder is the default byte-order for encoding operations.
|
||||
EncodeDefaultByteOrder = binary.BigEndian
|
||||
|
||||
// TestDefaultByteOrder is the default byte-order for tests.
|
||||
TestDefaultByteOrder = binary.BigEndian
|
||||
)
|
||||
|
||||
func GetModuleRootPath() string {
|
||||
if moduleRootPath == "" {
|
||||
moduleRootPath = os.Getenv("EXIF_MODULE_ROOT_PATH")
|
||||
if moduleRootPath != "" {
|
||||
return moduleRootPath
|
||||
}
|
||||
|
||||
currentWd, err := os.Getwd()
|
||||
log.PanicIf(err)
|
||||
|
||||
currentPath := currentWd
|
||||
|
||||
visited := make([]string, 0)
|
||||
|
||||
for {
|
||||
tryStampFilepath := path.Join(currentPath, ".MODULE_ROOT")
|
||||
|
||||
_, err := os.Stat(tryStampFilepath)
|
||||
if err != nil && os.IsNotExist(err) != true {
|
||||
log.Panic(err)
|
||||
} else if err == nil {
|
||||
break
|
||||
}
|
||||
|
||||
visited = append(visited, tryStampFilepath)
|
||||
|
||||
currentPath = path.Dir(currentPath)
|
||||
if currentPath == "/" {
|
||||
log.Panicf("could not find module-root: %v", visited)
|
||||
}
|
||||
}
|
||||
|
||||
moduleRootPath = currentPath
|
||||
}
|
||||
|
||||
return moduleRootPath
|
||||
}
|
||||
|
||||
func GetTestAssetsPath() string {
|
||||
moduleRootPath := GetModuleRootPath()
|
||||
assetsPath := path.Join(moduleRootPath, "assets")
|
||||
|
||||
return assetsPath
|
||||
}
|
||||
|
||||
func getTestImageFilepath() string {
|
||||
assetsPath := GetTestAssetsPath()
|
||||
testImageFilepath := path.Join(assetsPath, "NDM_8901.jpg")
|
||||
return testImageFilepath
|
||||
}
|
||||
|
||||
func getTestExifData() []byte {
|
||||
if testExifData == nil {
|
||||
assetsPath := GetTestAssetsPath()
|
||||
filepath := path.Join(assetsPath, "NDM_8901.jpg.exif")
|
||||
|
||||
var err error
|
||||
|
||||
testExifData, err = ioutil.ReadFile(filepath)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
return testExifData
|
||||
}
|
|
@@ -0,0 +1,452 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
var (
|
||||
typeLogger = log.NewLogger("exif.type")
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrNotEnoughData is used when there isn't enough data to accommodate what
|
||||
// we're trying to parse (sizeof(type) * unit_count).
|
||||
ErrNotEnoughData = errors.New("not enough data for type")
|
||||
|
||||
// ErrWrongType is used when we try to parse anything other than the
|
||||
// current type.
|
||||
ErrWrongType = errors.New("wrong type, can not parse")
|
||||
|
||||
// ErrUnhandledUndefinedTypedTag is used when we try to parse a tag that's
|
||||
// recorded as an "unknown" type but not a documented tag (therefore
|
||||
// leaving us not knowing how to read it).
|
||||
ErrUnhandledUndefinedTypedTag = errors.New("not a standard unknown-typed tag")
|
||||
)
|
||||
|
||||
// TagTypePrimitive is a type-alias that lets us easily look up type properties.
|
||||
type TagTypePrimitive uint16
|
||||
|
||||
const (
|
||||
// TypeByte describes an encoded list of bytes.
|
||||
TypeByte TagTypePrimitive = 1
|
||||
|
||||
// TypeAscii describes an encoded list of characters that is terminated
|
||||
// with a NUL in its encoded form.
|
||||
TypeAscii TagTypePrimitive = 2
|
||||
|
||||
// TypeShort describes an encoded list of shorts.
|
||||
TypeShort TagTypePrimitive = 3
|
||||
|
||||
// TypeLong describes an encoded list of longs.
|
||||
TypeLong TagTypePrimitive = 4
|
||||
|
||||
// TypeRational describes an encoded list of rationals.
|
||||
TypeRational TagTypePrimitive = 5
|
||||
|
||||
// TypeUndefined describes an encoded value that has a complex/non-clearcut
|
||||
// interpretation.
|
||||
TypeUndefined TagTypePrimitive = 7
|
||||
|
||||
// We've seen type-8, but have no documentation on it.
|
||||
|
||||
// TypeSignedLong describes an encoded list of signed longs.
|
||||
TypeSignedLong TagTypePrimitive = 9
|
||||
|
||||
// TypeSignedRational describes an encoded list of signed rationals.
|
||||
TypeSignedRational TagTypePrimitive = 10
|
||||
|
||||
// TypeAsciiNoNul is just a pseudo-type, for our own purposes.
|
||||
TypeAsciiNoNul TagTypePrimitive = 0xf0
|
||||
)
|
||||
|
||||
// String returns the name of the type.
|
||||
func (typeType TagTypePrimitive) String() string {
|
||||
return TypeNames[typeType]
|
||||
}
|
||||
|
||||
// Size returns the size of one atomic unit of the type.
|
||||
func (tagType TagTypePrimitive) Size() int {
|
||||
if tagType == TypeByte {
|
||||
return 1
|
||||
} else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
|
||||
return 1
|
||||
} else if tagType == TypeShort {
|
||||
return 2
|
||||
} else if tagType == TypeLong {
|
||||
return 4
|
||||
} else if tagType == TypeRational {
|
||||
return 8
|
||||
} else if tagType == TypeSignedLong {
|
||||
return 4
|
||||
} else if tagType == TypeSignedRational {
|
||||
return 8
|
||||
} else {
|
||||
log.Panicf("can not determine tag-value size for type (%d): [%s]", tagType, TypeNames[tagType])
|
||||
|
||||
// Never called.
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
// IsValid returns true if tagType is a valid type.
|
||||
func (tagType TagTypePrimitive) IsValid() bool {
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
return tagType == TypeByte ||
|
||||
tagType == TypeAscii ||
|
||||
tagType == TypeAsciiNoNul ||
|
||||
tagType == TypeShort ||
|
||||
tagType == TypeLong ||
|
||||
tagType == TypeRational ||
|
||||
tagType == TypeSignedLong ||
|
||||
tagType == TypeSignedRational ||
|
||||
tagType == TypeUndefined
|
||||
}
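// An illustrative sketch: the encoded byte-length of a value is Size()
// multiplied by the unit-count, which is the same bound the parser functions
// check before reading:

package main

import (
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	unitCount := 3

	// A RATIONAL is eight bytes per unit, so three units occupy 24 bytes.
	byteLength := exifcommon.TypeRational.Size() * unitCount

	fmt.Printf("%s x %d -> %d bytes\n", exifcommon.TypeRational, unitCount, byteLength)
}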
|
||||
|
||||
var (
|
||||
// TODO(dustin): Rename TypeNames() to typeNames() and add getter.
|
||||
TypeNames = map[TagTypePrimitive]string{
|
||||
TypeByte: "BYTE",
|
||||
TypeAscii: "ASCII",
|
||||
TypeShort: "SHORT",
|
||||
TypeLong: "LONG",
|
||||
TypeRational: "RATIONAL",
|
||||
TypeUndefined: "UNDEFINED",
|
||||
TypeSignedLong: "SLONG",
|
||||
TypeSignedRational: "SRATIONAL",
|
||||
|
||||
TypeAsciiNoNul: "_ASCII_NO_NUL",
|
||||
}
|
||||
|
||||
typeNamesR = map[string]TagTypePrimitive{}
|
||||
)
|
||||
|
||||
// Rational describes an unsigned rational value.
|
||||
type Rational struct {
|
||||
// Numerator is the numerator of the rational value.
|
||||
Numerator uint32
|
||||
|
||||
// Denominator is the denominator of the rational value.
|
||||
Denominator uint32
|
||||
}
|
||||
|
||||
// SignedRational describes a signed rational value.
|
||||
type SignedRational struct {
|
||||
// Numerator is the numerator of the rational value.
|
||||
Numerator int32
|
||||
|
||||
// Denominator is the denominator of the rational value.
|
||||
Denominator int32
|
||||
}
|
||||
|
||||
// FormatFromType returns a stringified value for the given encoding. Automatically
|
||||
// parses. Automatically calculates count based on type size. This function
|
||||
// also supports undefined-type values (the ones that we support, anyway) by
|
||||
// way of the String() method that they all require. We can't be more specific
|
||||
// because we're a base package and we can't refer to it.
|
||||
func FormatFromType(value interface{}, justFirst bool) (phrase string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): !! Add test
|
||||
|
||||
switch t := value.(type) {
|
||||
case []byte:
|
||||
return DumpBytesToString(t), nil
|
||||
case string:
|
||||
return t, nil
|
||||
case []uint16:
|
||||
if len(t) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if justFirst == true {
|
||||
var valueSuffix string
|
||||
if len(t) > 1 {
|
||||
valueSuffix = "..."
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", t), nil
|
||||
case []uint32:
|
||||
if len(t) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if justFirst == true {
|
||||
var valueSuffix string
|
||||
if len(t) > 1 {
|
||||
valueSuffix = "..."
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", t), nil
|
||||
case []Rational:
|
||||
if len(t) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
parts := make([]string, len(t))
|
||||
for i, r := range t {
|
||||
parts[i] = fmt.Sprintf("%d/%d", r.Numerator, r.Denominator)
|
||||
|
||||
if justFirst == true {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if justFirst == true {
|
||||
var valueSuffix string
|
||||
if len(t) > 1 {
|
||||
valueSuffix = "..."
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v%s", parts[0], valueSuffix), nil
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", parts), nil
|
||||
case []int32:
|
||||
if len(t) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if justFirst == true {
|
||||
var valueSuffix string
|
||||
if len(t) > 1 {
|
||||
valueSuffix = "..."
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", t), nil
|
||||
case []SignedRational:
|
||||
if len(t) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
parts := make([]string, len(t))
|
||||
for i, r := range t {
|
||||
parts[i] = fmt.Sprintf("%d/%d", r.Numerator, r.Denominator)
|
||||
|
||||
if justFirst == true {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if justFirst == true {
|
||||
var valueSuffix string
|
||||
if len(t) > 1 {
|
||||
valueSuffix = "..."
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v%s", parts[0], valueSuffix), nil
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", parts), nil
|
||||
case fmt.Stringer:
|
||||
// An undefined value that is documented (or that we otherwise support).
|
||||
return t.String(), nil
|
||||
default:
|
||||
// Affects only "unknown" values, in general.
|
||||
log.Panicf("type can not be formatted into string: %v", reflect.TypeOf(value).Name())
|
||||
|
||||
// Never called.
|
||||
return "", nil
|
||||
}
|
||||
}
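// An illustrative sketch of FormatFromType with a decoded RATIONAL slice,
// showing the difference that justFirst makes; the values are made up:

package main

import (
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	value := []exifcommon.Rational{
		{Numerator: 1, Denominator: 2},
		{Numerator: 3, Denominator: 4},
	}

	full, err := exifcommon.FormatFromType(value, false)
	if err != nil {
		panic(err)
	}

	first, err := exifcommon.FormatFromType(value, true)
	if err != nil {
		panic(err)
	}

	fmt.Println(full)  // [1/2 3/4]
	fmt.Println(first) // 1/2...
}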
|
||||
|
||||
// FormatFromBytes returns a stringified value for the given encoding. Automatically
|
||||
// parses. Automatically calculates count based on type size.
|
||||
func FormatFromBytes(rawBytes []byte, tagType TagTypePrimitive, justFirst bool, byteOrder binary.ByteOrder) (phrase string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): !! Add test
|
||||
|
||||
typeSize := tagType.Size()
|
||||
|
||||
if len(rawBytes)%typeSize != 0 {
|
||||
log.Panicf("byte-count (%d) does not align for [%s] type with a size of (%d) bytes", len(rawBytes), TypeNames[tagType], typeSize)
|
||||
}
|
||||
|
||||
// unitCount is the calculated unit-count. This should equal the original
|
||||
// value from the tag (pre-resolution).
|
||||
unitCount := uint32(len(rawBytes) / typeSize)
|
||||
|
||||
// Truncate the items if it's not bytes or a string and we just want the first.
|
||||
|
||||
var value interface{}
|
||||
|
||||
switch tagType {
|
||||
case TypeByte:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseBytes(rawBytes, unitCount)
|
||||
log.PanicIf(err)
|
||||
case TypeAscii:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseAscii(rawBytes, unitCount)
|
||||
log.PanicIf(err)
|
||||
case TypeAsciiNoNul:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseAsciiNoNul(rawBytes, unitCount)
|
||||
log.PanicIf(err)
|
||||
case TypeShort:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseShorts(rawBytes, unitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
case TypeLong:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseLongs(rawBytes, unitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
case TypeRational:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseRationals(rawBytes, unitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
case TypeSignedLong:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseSignedLongs(rawBytes, unitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
case TypeSignedRational:
|
||||
var err error
|
||||
|
||||
value, err = parser.ParseSignedRationals(rawBytes, unitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
default:
|
||||
// Affects only "unknown" values, in general.
|
||||
log.Panicf("value of type [%s] can not be formatted into string", tagType.String())
|
||||
|
||||
// Never called.
|
||||
return "", nil
|
||||
}
|
||||
|
||||
phrase, err = FormatFromType(value, justFirst)
|
||||
log.PanicIf(err)
|
||||
|
||||
return phrase, nil
|
||||
}
|
||||
|
||||
// TranslateStringToType converts user-provided strings to properly-typed
|
||||
// values. If a string, returns a string. Else, assumes that it's a single
|
||||
// number. If a list needs to be processed, it is the caller's responsibility to
|
||||
// split it (according to whichever convention has been established).
|
||||
func TranslateStringToType(tagType TagTypePrimitive, valueString string) (value interface{}, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if tagType == TypeUndefined {
|
||||
// The caller should just call String() on the decoded type.
|
||||
log.Panicf("undefined-type values are not supported")
|
||||
}
|
||||
|
||||
if tagType == TypeByte {
|
||||
wide, err := strconv.ParseInt(valueString, 16, 8)
|
||||
log.PanicIf(err)
|
||||
|
||||
return byte(wide), nil
|
||||
} else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
|
||||
// Whether or not we're putting a NUL on the end is only relevant for
|
||||
// byte-level encoding. This function really just supports a user
|
||||
// interface.
|
||||
|
||||
return valueString, nil
|
||||
} else if tagType == TypeShort {
|
||||
n, err := strconv.ParseUint(valueString, 10, 16)
|
||||
log.PanicIf(err)
|
||||
|
||||
return uint16(n), nil
|
||||
} else if tagType == TypeLong {
|
||||
n, err := strconv.ParseUint(valueString, 10, 32)
|
||||
log.PanicIf(err)
|
||||
|
||||
return uint32(n), nil
|
||||
} else if tagType == TypeRational {
|
||||
parts := strings.SplitN(valueString, "/", 2)
|
||||
|
||||
numerator, err := strconv.ParseUint(parts[0], 10, 32)
|
||||
log.PanicIf(err)
|
||||
|
||||
denominator, err := strconv.ParseUint(parts[1], 10, 32)
|
||||
log.PanicIf(err)
|
||||
|
||||
return Rational{
|
||||
Numerator: uint32(numerator),
|
||||
Denominator: uint32(denominator),
|
||||
}, nil
|
||||
} else if tagType == TypeSignedLong {
|
||||
n, err := strconv.ParseInt(valueString, 10, 32)
|
||||
log.PanicIf(err)
|
||||
|
||||
return int32(n), nil
|
||||
} else if tagType == TypeSignedRational {
|
||||
parts := strings.SplitN(valueString, "/", 2)
|
||||
|
||||
numerator, err := strconv.ParseInt(parts[0], 10, 32)
|
||||
log.PanicIf(err)
|
||||
|
||||
denominator, err := strconv.ParseInt(parts[1], 10, 32)
|
||||
log.PanicIf(err)
|
||||
|
||||
return SignedRational{
|
||||
Numerator: int32(numerator),
|
||||
Denominator: int32(denominator),
|
||||
}, nil
|
||||
}
|
||||
|
||||
log.Panicf("from-string encoding for type not supported; this shouldn't happen: [%s]", tagType.String())
|
||||
return nil, nil
|
||||
}
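// An illustrative sketch of TranslateStringToType, converting a user-provided
// exposure-time string into a typed rational; the input is a placeholder:

package main

import (
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	value, err := exifcommon.TranslateStringToType(exifcommon.TypeRational, "1/250")
	if err != nil {
		panic(err)
	}

	r := value.(exifcommon.Rational)
	fmt.Println(r.Numerator, r.Denominator) // 1 250
}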
|
||||
|
||||
// GetTypeByName returns the `TagTypePrimitive` for the given type name.
|
||||
// Returns (0) if not valid.
|
||||
func GetTypeByName(typeName string) (tagType TagTypePrimitive, found bool) {
|
||||
tagType, found = typeNamesR[typeName]
|
||||
return tagType, found
|
||||
}
|
||||
|
||||
// BasicTag describes a single tag for any purpose.
|
||||
type BasicTag struct {
|
||||
// FqIfdPath is the fully-qualified IFD-path.
|
||||
FqIfdPath string
|
||||
|
||||
// IfdPath is the unindexed IFD-path.
|
||||
IfdPath string
|
||||
|
||||
// TagId is the tag-ID.
|
||||
TagId uint16
|
||||
}
|
||||
|
||||
func init() {
|
||||
for typeId, typeName := range TypeNames {
|
||||
typeNamesR[typeName] = typeId
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,79 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
// DumpBytes prints a list of hex-encoded bytes.
|
||||
func DumpBytes(data []byte) {
|
||||
fmt.Printf("DUMP: ")
|
||||
for _, x := range data {
|
||||
fmt.Printf("%02x ", x)
|
||||
}
|
||||
|
||||
fmt.Printf("\n")
|
||||
}
|
||||
|
||||
// DumpBytesClause prints a list like DumpBytes(), but encapsulated in
|
||||
// "[]byte { ... }".
|
||||
func DumpBytesClause(data []byte) {
|
||||
fmt.Printf("DUMP: ")
|
||||
|
||||
fmt.Printf("[]byte { ")
|
||||
|
||||
for i, x := range data {
|
||||
fmt.Printf("0x%02x", x)
|
||||
|
||||
if i < len(data)-1 {
|
||||
fmt.Printf(", ")
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf(" }\n")
|
||||
}
|
||||
|
||||
// DumpBytesToString returns a stringified list of hex-encoded bytes.
|
||||
func DumpBytesToString(data []byte) string {
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
for i, x := range data {
|
||||
_, err := b.WriteString(fmt.Sprintf("%02x", x))
|
||||
log.PanicIf(err)
|
||||
|
||||
if i < len(data)-1 {
|
||||
_, err := b.WriteRune(' ')
|
||||
log.PanicIf(err)
|
||||
}
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// DumpBytesClauseToString returns a comma-separated list of hex-encoded bytes.
|
||||
func DumpBytesClauseToString(data []byte) string {
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
for i, x := range data {
|
||||
_, err := b.WriteString(fmt.Sprintf("0x%02x", x))
|
||||
log.PanicIf(err)
|
||||
|
||||
if i < len(data)-1 {
|
||||
_, err := b.WriteString(", ")
|
||||
log.PanicIf(err)
|
||||
}
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// ExifFullTimestampString produces a string like "2018:11:30 13:01:49" from a
|
||||
// `time.Time` struct. It will attempt to convert to UTC first.
|
||||
func ExifFullTimestampString(t time.Time) (fullTimestampPhrase string) {
|
||||
t = t.UTC()
|
||||
|
||||
return fmt.Sprintf("%04d:%02d:%02d %02d:%02d:%02d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
|
||||
}
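// An illustrative sketch of ExifFullTimestampString:

package main

import (
	"fmt"
	"time"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	t := time.Date(2018, 11, 30, 13, 1, 49, 0, time.UTC)

	fmt.Println(exifcommon.ExifFullTimestampString(t)) // 2018:11:30 13:01:49
}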
|
|
@@ -0,0 +1,412 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
var (
|
||||
parser *Parser
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrNotFarValue indicates that an offset-based lookup was attempted for a
|
||||
// non-offset-based (embedded) value.
|
||||
ErrNotFarValue = errors.New("not a far value")
|
||||
)
|
||||
|
||||
// ValueContext embeds all of the parameters required to find and extract the
|
||||
// actual tag value.
|
||||
type ValueContext struct {
|
||||
unitCount uint32
|
||||
valueOffset uint32
|
||||
rawValueOffset []byte
|
||||
addressableData []byte
|
||||
|
||||
tagType TagTypePrimitive
|
||||
byteOrder binary.ByteOrder
|
||||
|
||||
// undefinedValueTagType is the effective type to use if this is an
|
||||
// "undefined" value.
|
||||
undefinedValueTagType TagTypePrimitive
|
||||
|
||||
ifdPath string
|
||||
tagId uint16
|
||||
}
|
||||
|
||||
// TODO(dustin): We can update newValueContext() to derive `valueOffset` itself (from `rawValueOffset`).
|
||||
|
||||
// NewValueContext returns a new ValueContext struct.
|
||||
func NewValueContext(ifdPath string, tagId uint16, unitCount, valueOffset uint32, rawValueOffset, addressableData []byte, tagType TagTypePrimitive, byteOrder binary.ByteOrder) *ValueContext {
|
||||
return &ValueContext{
|
||||
unitCount: unitCount,
|
||||
valueOffset: valueOffset,
|
||||
rawValueOffset: rawValueOffset,
|
||||
addressableData: addressableData,
|
||||
|
||||
tagType: tagType,
|
||||
byteOrder: byteOrder,
|
||||
|
||||
ifdPath: ifdPath,
|
||||
tagId: tagId,
|
||||
}
|
||||
}
|
||||
|
||||
// SetUndefinedValueType sets the effective type if this is an unknown-type tag.
|
||||
func (vc *ValueContext) SetUndefinedValueType(tagType TagTypePrimitive) {
|
||||
if vc.tagType != TypeUndefined {
|
||||
log.Panicf("can not set effective type for unknown-type tag because this is *not* an unknown-type tag")
|
||||
}
|
||||
|
||||
vc.undefinedValueTagType = tagType
|
||||
}
|
||||
|
||||
// UnitCount returns the embedded unit-count.
|
||||
func (vc *ValueContext) UnitCount() uint32 {
|
||||
return vc.unitCount
|
||||
}
|
||||
|
||||
// ValueOffset returns the value-offset decoded as a `uint32`.
|
||||
func (vc *ValueContext) ValueOffset() uint32 {
|
||||
return vc.valueOffset
|
||||
}
|
||||
|
||||
// RawValueOffset returns the uninterpreted value-offset. This is used for
|
||||
// embedded values (values small enough to fit within the offset bytes rather
|
||||
// than needing to be stored elsewhere and referred to by an actual offset).
|
||||
func (vc *ValueContext) RawValueOffset() []byte {
|
||||
return vc.rawValueOffset
|
||||
}
|
||||
|
||||
// AddressableData returns the block of data that we can dereference into.
|
||||
func (vc *ValueContext) AddressableData() []byte {
|
||||
return vc.addressableData
|
||||
}
|
||||
|
||||
// ByteOrder returns the byte-order of numbers.
|
||||
func (vc *ValueContext) ByteOrder() binary.ByteOrder {
|
||||
return vc.byteOrder
|
||||
}
|
||||
|
||||
// IfdPath returns the path of the IFD containing this tag.
|
||||
func (vc *ValueContext) IfdPath() string {
|
||||
return vc.ifdPath
|
||||
}
|
||||
|
||||
// TagId returns the ID of the tag that we represent.
|
||||
func (vc *ValueContext) TagId() uint16 {
|
||||
return vc.tagId
|
||||
}
|
||||
|
||||
// isEmbedded returns whether the value is embedded or a reference. This can't
|
||||
// be precalculated since the size is not defined for all types (namely the
|
||||
// "undefined" types).
|
||||
func (vc *ValueContext) isEmbedded() bool {
|
||||
tagType := vc.effectiveValueType()
|
||||
|
||||
return (tagType.Size() * int(vc.unitCount)) <= 4
|
||||
}
|
||||
|
||||
// SizeInBytes returns the number of bytes that this value requires. The
|
||||
// underlying call will panic if the type is UNDEFINED. It is the
|
||||
// responsibility of the caller to preemptively check that.
|
||||
func (vc *ValueContext) SizeInBytes() int {
|
||||
tagType := vc.effectiveValueType()
|
||||
|
||||
return tagType.Size() * int(vc.unitCount)
|
||||
}
|
||||
|
||||
// effectiveValueType returns the effective type of the unknown-type tag or, if
|
||||
// not unknown, the actual type.
|
||||
func (vc *ValueContext) effectiveValueType() (tagType TagTypePrimitive) {
|
||||
if vc.tagType == TypeUndefined {
|
||||
tagType = vc.undefinedValueTagType
|
||||
|
||||
if tagType == 0 {
|
||||
log.Panicf("undefined-value type not set")
|
||||
}
|
||||
} else {
|
||||
tagType = vc.tagType
|
||||
}
|
||||
|
||||
return tagType
|
||||
}
|
||||
|
||||
// readRawEncoded returns the encoded bytes for the value that we represent.
|
||||
func (vc *ValueContext) readRawEncoded() (rawBytes []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
tagType := vc.effectiveValueType()
|
||||
|
||||
unitSizeRaw := uint32(tagType.Size())
|
||||
|
||||
if vc.isEmbedded() == true {
|
||||
byteLength := unitSizeRaw * vc.unitCount
|
||||
return vc.rawValueOffset[:byteLength], nil
|
||||
}
|
||||
|
||||
return vc.addressableData[vc.valueOffset : vc.valueOffset+vc.unitCount*unitSizeRaw], nil
|
||||
}
|
||||
|
||||
// GetFarOffset returns the offset if the value is not embedded [within the
|
||||
// pointer itself] or an error if an embedded value.
|
||||
func (vc *ValueContext) GetFarOffset() (offset uint32, err error) {
|
||||
if vc.isEmbedded() == true {
|
||||
return 0, ErrNotFarValue
|
||||
}
|
||||
|
||||
return vc.valueOffset, nil
|
||||
}
|
||||
|
||||
// ReadRawEncoded returns the encoded bytes for the value that we represent.
|
||||
func (vc *ValueContext) ReadRawEncoded() (rawBytes []byte, err error) {
|
||||
|
||||
// TODO(dustin): Remove this method and rename readRawEncoded in its place.
|
||||
|
||||
return vc.readRawEncoded()
|
||||
}
|
||||
|
||||
// Format returns a string representation for the value.
|
||||
//
|
||||
// Where the type is not ASCII, `justFirst` indicates whether to just stringify
|
||||
// the first item in the slice (or return an empty string if the slice is
|
||||
// empty).
|
||||
//
|
||||
// Since this method lacks the information to process undefined-type tags (e.g.
|
||||
// byte-order, tag-ID, IFD type), it will return an error if attempted. See
|
||||
// `Undefined()`.
|
||||
func (vc *ValueContext) Format() (value string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawBytes, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
phrase, err := FormatFromBytes(rawBytes, vc.effectiveValueType(), false, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return phrase, nil
|
||||
}
|
||||
|
||||
// FormatFirst is similar to `Format` but only gets and stringifies the first
|
||||
// item.
|
||||
func (vc *ValueContext) FormatFirst() (value string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawBytes, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
phrase, err := FormatFromBytes(rawBytes, vc.tagType, true, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return phrase, nil
|
||||
}
|
||||
|
||||
// ReadBytes parses the encoded byte-array from the value-context.
|
||||
func (vc *ValueContext) ReadBytes() (value []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseBytes(rawValue, vc.unitCount)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadAscii parses the encoded NUL-terminated ASCII string from the value-
|
||||
// context.
|
||||
func (vc *ValueContext) ReadAscii() (value string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseAscii(rawValue, vc.unitCount)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadAsciiNoNul parses the non-NUL-terminated encoded ASCII string from the
|
||||
// value-context.
|
||||
func (vc *ValueContext) ReadAsciiNoNul() (value string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseAsciiNoNul(rawValue, vc.unitCount)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadShorts parses the list of encoded shorts from the value-context.
|
||||
func (vc *ValueContext) ReadShorts() (value []uint16, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseShorts(rawValue, vc.unitCount, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadLongs parses the list of encoded, unsigned longs from the value-context.
|
||||
func (vc *ValueContext) ReadLongs() (value []uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseLongs(rawValue, vc.unitCount, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadRationals parses the list of encoded, unsigned rationals from the value-
|
||||
// context.
|
||||
func (vc *ValueContext) ReadRationals() (value []Rational, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseRationals(rawValue, vc.unitCount, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadSignedLongs parses the list of encoded, signed longs from the value-context.
|
||||
func (vc *ValueContext) ReadSignedLongs() (value []int32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseSignedLongs(rawValue, vc.unitCount, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// ReadSignedRationals parses the list of encoded, signed rationals from the
|
||||
// value-context.
|
||||
func (vc *ValueContext) ReadSignedRationals() (value []SignedRational, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
rawValue, err := vc.readRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
value, err = parser.ParseSignedRationals(rawValue, vc.unitCount, vc.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// Values knows how to resolve the given value. This value is always a list
|
||||
// (undefined-values aside), so we're named accordingly.
|
||||
//
|
||||
// Since this method lacks the information to process unknown-type tags (e.g.
|
||||
// byte-order, tag-ID, IFD type), it will return an error if attempted. See
|
||||
// `Undefined()`.
|
||||
func (vc *ValueContext) Values() (values interface{}, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if vc.tagType == TypeByte {
|
||||
values, err = vc.ReadBytes()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeAscii {
|
||||
values, err = vc.ReadAscii()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeAsciiNoNul {
|
||||
values, err = vc.ReadAsciiNoNul()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeShort {
|
||||
values, err = vc.ReadShorts()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeLong {
|
||||
values, err = vc.ReadLongs()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeRational {
|
||||
values, err = vc.ReadRationals()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeSignedLong {
|
||||
values, err = vc.ReadSignedLongs()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeSignedRational {
|
||||
values, err = vc.ReadSignedRationals()
|
||||
log.PanicIf(err)
|
||||
} else if vc.tagType == TypeUndefined {
|
||||
log.Panicf("will not parse undefined-type value")
|
||||
|
||||
// Never called.
|
||||
return nil, nil
|
||||
} else {
|
||||
log.Panicf("value of type [%s] is unparseable", vc.tagType)
|
||||
// Never called.
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return values, nil
|
||||
}
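// An illustrative sketch of resolving an embedded SHORT through a
// ValueContext; the IFD path, tag-ID, and bytes are arbitrary placeholders:

package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	// A single big-endian SHORT (0x0064 == 100) fits within the four
	// value-offset bytes, so no separate addressable data is needed.
	rawValueOffset := []byte{0x00, 0x64, 0x00, 0x00}

	vc := exifcommon.NewValueContext(
		"IFD/Exif", 0x8827, 1, 0, rawValueOffset, nil,
		exifcommon.TypeShort, binary.BigEndian)

	values, err := vc.Values()
	if err != nil {
		panic(err)
	}

	fmt.Println(values) // [100]
}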
|
||||
|
||||
func init() {
|
||||
parser = new(Parser)
|
||||
}
|
|
@@ -0,0 +1,229 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"reflect"
|
||||
"time"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
var (
|
||||
typeEncodeLogger = log.NewLogger("exif.type_encode")
|
||||
)
|
||||
|
||||
// EncodedData encapsulates the compound output of an encoding operation.
|
||||
type EncodedData struct {
|
||||
Type TagTypePrimitive
|
||||
Encoded []byte
|
||||
|
||||
// TODO(dustin): Is this really necessary? We might have this just to correlate to the incoming stream format (raw bytes and a unit-count both for incoming and outgoing).
|
||||
UnitCount uint32
|
||||
}
|
||||
|
||||
// ValueEncoder knows how to encode values of every type to bytes.
|
||||
type ValueEncoder struct {
|
||||
byteOrder binary.ByteOrder
|
||||
}
|
||||
|
||||
// NewValueEncoder returns a new ValueEncoder.
|
||||
func NewValueEncoder(byteOrder binary.ByteOrder) *ValueEncoder {
|
||||
return &ValueEncoder{
|
||||
byteOrder: byteOrder,
|
||||
}
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeBytes(value []uint8) (ed EncodedData, err error) {
|
||||
ed.Type = TypeByte
|
||||
ed.Encoded = []byte(value)
|
||||
ed.UnitCount = uint32(len(value))
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeAscii(value string) (ed EncodedData, err error) {
|
||||
ed.Type = TypeAscii
|
||||
|
||||
ed.Encoded = []byte(value)
|
||||
ed.Encoded = append(ed.Encoded, 0)
|
||||
|
||||
ed.UnitCount = uint32(len(ed.Encoded))
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
// encodeAsciiNoNul returns a string encoded as a byte-string without a trailing
|
||||
// NUL byte.
|
||||
//
|
||||
// Note that:
|
||||
//
|
||||
// 1. This type can not be automatically encoded using `Encode()`. The default
|
||||
// mode is to encode *with* a trailing NUL byte using `encodeAscii`. Only
|
||||
// certain undefined-type tags use an unterminated ASCII string, and these
|
||||
// are exceptional in nature.
|
||||
//
|
||||
// 2. The presence of this method allows us to completely test the complementary
|
||||
// no-nul parser.
|
||||
//
|
||||
func (ve *ValueEncoder) encodeAsciiNoNul(value string) (ed EncodedData, err error) {
|
||||
ed.Type = TypeAsciiNoNul
|
||||
ed.Encoded = []byte(value)
|
||||
ed.UnitCount = uint32(len(ed.Encoded))
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeShorts(value []uint16) (ed EncodedData, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ed.UnitCount = uint32(len(value))
|
||||
ed.Encoded = make([]byte, ed.UnitCount*2)
|
||||
|
||||
for i := uint32(0); i < ed.UnitCount; i++ {
|
||||
ve.byteOrder.PutUint16(ed.Encoded[i*2:(i+1)*2], value[i])
|
||||
}
|
||||
|
||||
ed.Type = TypeShort
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeLongs(value []uint32) (ed EncodedData, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ed.UnitCount = uint32(len(value))
|
||||
ed.Encoded = make([]byte, ed.UnitCount*4)
|
||||
|
||||
for i := uint32(0); i < ed.UnitCount; i++ {
|
||||
ve.byteOrder.PutUint32(ed.Encoded[i*4:(i+1)*4], value[i])
|
||||
}
|
||||
|
||||
ed.Type = TypeLong
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeRationals(value []Rational) (ed EncodedData, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ed.UnitCount = uint32(len(value))
|
||||
ed.Encoded = make([]byte, ed.UnitCount*8)
|
||||
|
||||
for i := uint32(0); i < ed.UnitCount; i++ {
|
||||
ve.byteOrder.PutUint32(ed.Encoded[i*8+0:i*8+4], value[i].Numerator)
|
||||
ve.byteOrder.PutUint32(ed.Encoded[i*8+4:i*8+8], value[i].Denominator)
|
||||
}
|
||||
|
||||
ed.Type = TypeRational
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeSignedLongs(value []int32) (ed EncodedData, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ed.UnitCount = uint32(len(value))
|
||||
|
||||
b := bytes.NewBuffer(make([]byte, 0, 8*ed.UnitCount))
|
||||
|
||||
for i := uint32(0); i < ed.UnitCount; i++ {
|
||||
err := binary.Write(b, ve.byteOrder, value[i])
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
ed.Type = TypeSignedLong
|
||||
ed.Encoded = b.Bytes()
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
func (ve *ValueEncoder) encodeSignedRationals(value []SignedRational) (ed EncodedData, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ed.UnitCount = uint32(len(value))
|
||||
|
||||
b := bytes.NewBuffer(make([]byte, 0, 8*ed.UnitCount))
|
||||
|
||||
for i := uint32(0); i < ed.UnitCount; i++ {
|
||||
err := binary.Write(b, ve.byteOrder, value[i].Numerator)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = binary.Write(b, ve.byteOrder, value[i].Denominator)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
ed.Type = TypeSignedRational
|
||||
ed.Encoded = b.Bytes()
|
||||
|
||||
return ed, nil
|
||||
}
|
||||
|
||||
// Encode returns bytes for the given value, inferring type from the actual
|
||||
// value. This does not support `TypeAsciiNoNul` (all strings are encoded as
|
||||
// `TypeAscii`).
|
||||
func (ve *ValueEncoder) Encode(value interface{}) (ed EncodedData, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
switch value.(type) {
|
||||
case []byte:
|
||||
ed, err = ve.encodeBytes(value.([]byte))
|
||||
log.PanicIf(err)
|
||||
case string:
|
||||
ed, err = ve.encodeAscii(value.(string))
|
||||
log.PanicIf(err)
|
||||
case []uint16:
|
||||
ed, err = ve.encodeShorts(value.([]uint16))
|
||||
log.PanicIf(err)
|
||||
case []uint32:
|
||||
ed, err = ve.encodeLongs(value.([]uint32))
|
||||
log.PanicIf(err)
|
||||
case []Rational:
|
||||
ed, err = ve.encodeRationals(value.([]Rational))
|
||||
log.PanicIf(err)
|
||||
case []int32:
|
||||
ed, err = ve.encodeSignedLongs(value.([]int32))
|
||||
log.PanicIf(err)
|
||||
case []SignedRational:
|
||||
ed, err = ve.encodeSignedRationals(value.([]SignedRational))
|
||||
log.PanicIf(err)
|
||||
case time.Time:
|
||||
// For convenience, if the user doesn't want to deal with translation
|
||||
// semantics with timestamps.
|
||||
|
||||
t := value.(time.Time)
|
||||
s := ExifFullTimestampString(t)
|
||||
|
||||
ed, err = ve.encodeAscii(s)
|
||||
log.PanicIf(err)
|
||||
default:
|
||||
log.Panicf("value not encodable: [%s] [%v]", reflect.TypeOf(value), value)
|
||||
}
|
||||
|
||||
return ed, nil
|
||||
}
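// An illustrative sketch of the ValueEncoder, encoding one unsigned rational
// to its raw big-endian bytes:

package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	ve := exifcommon.NewValueEncoder(binary.BigEndian)

	ed, err := ve.Encode([]exifcommon.Rational{{Numerator: 1, Denominator: 10}})
	if err != nil {
		panic(err)
	}

	fmt.Println(ed.Type, ed.UnitCount) // RATIONAL 1
	fmt.Println(exifcommon.DumpBytesClauseToString(ed.Encoded))
	// 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0a
}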
@@ -0,0 +1,14 @@
package exif
|
||||
|
||||
import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrTagNotFound indicates that the tag was not found.
|
||||
ErrTagNotFound = errors.New("tag not found")
|
||||
|
||||
// ErrTagNotKnown indicates that the tag is not registered with us as a
|
||||
// known tag.
|
||||
ErrTagNotKnown = errors.New("tag is not known")
|
||||
)
|
|
@@ -0,0 +1,258 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"encoding/binary"
|
||||
"io/ioutil"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
const (
|
||||
// ExifAddressableAreaStart is the absolute offset in the file that all
|
||||
// offsets are relative to.
|
||||
ExifAddressableAreaStart = uint32(0x0)
|
||||
|
||||
// ExifDefaultFirstIfdOffset is essentially the number of bytes in addition
|
||||
// to `ExifAddressableAreaStart` that you have to move in order to escape
|
||||
// the rest of the header and get to the earliest point where we can put
|
||||
// stuff (which has to be the first IFD). This is the size of the header
|
||||
// sequence containing the two-character byte-order, two-character fixed-
|
||||
// bytes, and the four bytes describing the first-IFD offset.
|
||||
ExifDefaultFirstIfdOffset = uint32(2 + 2 + 4)
|
||||
)
|
||||
|
||||
const (
|
||||
// ExifSignatureLength is the number of bytes in the EXIF signature (which
|
||||
// customarily includes the first IFD offset).
|
||||
ExifSignatureLength = 8
|
||||
)
|
||||
|
||||
var (
|
||||
exifLogger = log.NewLogger("exif.exif")
|
||||
|
||||
ExifBigEndianSignature = [4]byte{'M', 'M', 0x00, 0x2a}
|
||||
ExifLittleEndianSignature = [4]byte{'I', 'I', 0x2a, 0x00}
|
||||
)
|
||||
|
||||
var (
|
||||
ErrNoExif = errors.New("no exif data")
|
||||
ErrExifHeaderError = errors.New("exif header error")
|
||||
)
|
||||
|
||||
// SearchAndExtractExif searches for an EXIF blob in the byte-slice.
|
||||
func SearchAndExtractExif(data []byte) (rawExif []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
b := bytes.NewBuffer(data)
|
||||
|
||||
rawExif, err = SearchAndExtractExifWithReader(b)
|
||||
if err != nil {
|
||||
if err == ErrNoExif {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
return rawExif, nil
|
||||
}
|
||||
|
||||
// SearchAndExtractExifWithReader searches for an EXIF blob using an
|
||||
// `io.Reader`. We can't know how long the EXIF data is without parsing it,
|
||||
// so this will likely grab a lot of the image-data, too.
|
||||
func SearchAndExtractExifWithReader(r io.Reader) (rawExif []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// Search for the beginning of the EXIF information. The EXIF is near the
|
||||
// beginning of most JPEGs, so this likely doesn't have a high cost (at
|
||||
// least, again, with JPEGs).
|
||||
|
||||
br := bufio.NewReader(r)
|
||||
discarded := 0
|
||||
|
||||
for {
|
||||
window, err := br.Peek(ExifSignatureLength)
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
return nil, ErrNoExif
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
_, err = ParseExifHeader(window)
|
||||
if err != nil {
|
||||
if log.Is(err, ErrNoExif) == true {
|
||||
// No EXIF. Move forward by one byte.
|
||||
|
||||
_, err := br.Discard(1)
|
||||
log.PanicIf(err)
|
||||
|
||||
discarded++
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
// Some other error.
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
exifLogger.Debugf(nil, "Found EXIF blob (%d) bytes from initial position.", discarded)
|
||||
|
||||
rawExif, err = ioutil.ReadAll(br)
|
||||
log.PanicIf(err)
|
||||
|
||||
return rawExif, nil
|
||||
}
|
||||
|
||||
// SearchFileAndExtractExif returns a slice from the beginning of the EXIF data
|
||||
// to the end of the file (it's not practical to try and calculate where the
|
||||
// data actually ends).
|
||||
func SearchFileAndExtractExif(filepath string) (rawExif []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// Open the file.
|
||||
|
||||
f, err := os.Open(filepath)
|
||||
log.PanicIf(err)
|
||||
|
||||
defer f.Close()
|
||||
|
||||
rawExif, err = SearchAndExtractExifWithReader(f)
|
||||
log.PanicIf(err)
|
||||
|
||||
return rawExif, nil
|
||||
}
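// An illustrative sketch tying the extraction and header-parsing helpers in
// this file together. "photo.jpg" is a placeholder path, and the printed
// header is only an example of the format:

package main

import (
	"fmt"

	exif "github.com/dsoprea/go-exif/v2"
)

func main() {
	rawExif, err := exif.SearchFileAndExtractExif("photo.jpg")
	if err != nil {
		panic(err)
	}

	eh, err := exif.ParseExifHeader(rawExif)
	if err != nil {
		panic(err)
	}

	fmt.Println(eh) // e.g. ExifHeader<BYTE-ORDER=[BigEndian] FIRST-IFD-OFFSET=(0x08)>
}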
|
||||
|
||||
type ExifHeader struct {
|
||||
ByteOrder binary.ByteOrder
|
||||
FirstIfdOffset uint32
|
||||
}
|
||||
|
||||
func (eh ExifHeader) String() string {
|
||||
return fmt.Sprintf("ExifHeader<BYTE-ORDER=[%v] FIRST-IFD-OFFSET=(0x%02x)>", eh.ByteOrder, eh.FirstIfdOffset)
|
||||
}
|
||||
|
||||
// ParseExifHeader parses the bytes at the very top of the header.
|
||||
//
|
||||
// This will panic with ErrNoExif on any data errors so that we can double as
|
||||
// an EXIF-detection routine.
|
||||
func ParseExifHeader(data []byte) (eh ExifHeader, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// Good reference:
|
||||
//
|
||||
// CIPA DC-008-2016; JEITA CP-3451D
|
||||
// -> http://www.cipa.jp/std/documents/e/DC-008-Translation-2016-E.pdf
|
||||
|
||||
if len(data) < ExifSignatureLength {
|
||||
exifLogger.Warningf(nil, "Not enough data for EXIF header: (%d)", len(data))
|
||||
return eh, ErrNoExif
|
||||
}
|
||||
|
||||
if bytes.Equal(data[:4], ExifBigEndianSignature[:]) == true {
|
||||
eh.ByteOrder = binary.BigEndian
|
||||
} else if bytes.Equal(data[:4], ExifLittleEndianSignature[:]) == true {
|
||||
eh.ByteOrder = binary.LittleEndian
|
||||
} else {
|
||||
return eh, ErrNoExif
|
||||
}
|
||||
|
||||
eh.FirstIfdOffset = eh.ByteOrder.Uint32(data[4:8])
|
||||
|
||||
return eh, nil
|
||||
}
|
||||
|
||||
// Visit recursively invokes a callback for every tag.
|
||||
func Visit(rootIfdIdentity *exifcommon.IfdIdentity, ifdMapping *exifcommon.IfdMapping, tagIndex *TagIndex, exifData []byte, visitor TagVisitorFn) (eh ExifHeader, furthestOffset uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
eh, err = ParseExifHeader(exifData)
|
||||
log.PanicIf(err)
|
||||
|
||||
ie := NewIfdEnumerate(ifdMapping, tagIndex, exifData, eh.ByteOrder)
|
||||
|
||||
_, err = ie.Scan(rootIfdIdentity, eh.FirstIfdOffset, visitor)
|
||||
log.PanicIf(err)
|
||||
|
||||
furthestOffset = ie.FurthestOffset()
|
||||
|
||||
return eh, furthestOffset, nil
|
||||
}
|
||||
|
||||
// Collect recursively builds a static structure of all IFDs and tags.
|
||||
func Collect(ifdMapping *exifcommon.IfdMapping, tagIndex *TagIndex, exifData []byte) (eh ExifHeader, index IfdIndex, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
eh, err = ParseExifHeader(exifData)
|
||||
log.PanicIf(err)
|
||||
|
||||
ie := NewIfdEnumerate(ifdMapping, tagIndex, exifData, eh.ByteOrder)
|
||||
|
||||
index, err = ie.Collect(eh.FirstIfdOffset)
|
||||
log.PanicIf(err)
|
||||
|
||||
return eh, index, nil
|
||||
}
|
||||
|
||||
// BuildExifHeader constructs the bytes that go at the front of the stream.
|
||||
func BuildExifHeader(byteOrder binary.ByteOrder, firstIfdOffset uint32) (headerBytes []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
var signatureBytes []byte
|
||||
if byteOrder == binary.BigEndian {
|
||||
signatureBytes = ExifBigEndianSignature[:]
|
||||
} else {
|
||||
signatureBytes = ExifLittleEndianSignature[:]
|
||||
}
|
||||
|
||||
_, err = b.Write(signatureBytes)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = binary.Write(b, byteOrder, firstIfdOffset)
|
||||
log.PanicIf(err)
|
||||
|
||||
return b.Bytes(), nil
|
||||
}
|
|
@@ -0,0 +1,15 @@
|
|||
module github.com/dsoprea/go-exif/v2
|
||||
|
||||
go 1.13
|
||||
|
||||
// Development only
|
||||
// replace github.com/dsoprea/go-logging => ../../go-logging
|
||||
|
||||
require (
|
||||
github.com/dsoprea/go-logging v0.0.0-20200517223158-a10564966e9d
|
||||
github.com/dsoprea/go-utility v0.0.0-20200711062821-fab8125e9bdf // indirect
|
||||
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d
|
||||
github.com/jessevdk/go-flags v1.4.0
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 // indirect
|
||||
gopkg.in/yaml.v2 v2.3.0
|
||||
)
|
|
@@ -0,0 +1,37 @@
|
|||
github.com/dsoprea/go-exif/v2 v2.0.0-20200321225314-640175a69fe4/go.mod h1:Lm2lMM2zx8p4a34ZemkaUV95AnMl4ZvLbCUbwOvLC2E=
|
||||
github.com/dsoprea/go-logging v0.0.0-20190624164917-c4f10aab7696 h1:VGFnZAcLwPpt1sHlAxml+pGLZz9A2s+K/s1YNhPC91Y=
|
||||
github.com/dsoprea/go-logging v0.0.0-20190624164917-c4f10aab7696/go.mod h1:Nm/x2ZUNRW6Fe5C3LxdY1PyZY5wmDv/s5dkPJ/VB3iA=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502191043-ec333ec7635f h1:XM9MVftaUNA4CcjV97+4bSy7u9Ns04DEYbZkswUrRtc=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502191043-ec333ec7635f/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502201358-170ff607885f h1:FonKAuW3PmNtqk9tOR+Z7bnyQHytmnZBCmm5z1PQMss=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502201358-170ff607885f/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200517223158-a10564966e9d h1:F/7L5wr/fP/SKeO5HuMlNEX9Ipyx2MbH2rV9G4zJRpk=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200517223158-a10564966e9d/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-utility v0.0.0-20200711062821-fab8125e9bdf h1:/w4QxepU4AHh3AuO6/g8y/YIIHH5+aKP3Bj8sg5cqhU=
|
||||
github.com/dsoprea/go-utility v0.0.0-20200711062821-fab8125e9bdf/go.mod h1:95+K3z2L0mqsVYd6yveIv1lmtT3tcQQ3dVakPySffW8=
|
||||
github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
|
||||
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
|
||||
github.com/go-errors/errors v1.0.2 h1:xMxH9j2fNg/L4hLn/4y3M0IUsn0M6Wbu/Uh9QlOfBh4=
|
||||
github.com/go-errors/errors v1.0.2/go.mod h1:psDX2osz5VnTOnFWbDeWwS7yejl+uV3FEWEp4lssFEs=
|
||||
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec h1:lJwO/92dFXWeXOZdoGXgptLmNLwynMSHUmU6besqtiw=
|
||||
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d h1:C/hKUcHT483btRbeGkrRjJz+Zbcj8audldIi9tRJDCc=
|
||||
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553 h1:efeOvDhwQ29Dj3SdAV/MJf8oukgn+8D8WgaCaRMchF8=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200320220750-118fecf932d8/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5 h1:WQ8q63x+f/zpC8Ac1s9wLElVoHhm32p6tudrU72n1QA=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 h1:EZ3cVSzKOlJxAd8e8YAJ7no8nNypTxexh/YE/xW3ZEY=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
|
||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
@ -0,0 +1,117 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
"github.com/golang/geo/s2"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrGpsCoordinatesNotValid means that some part of the geographic data was
|
||||
// unparseable.
|
||||
ErrGpsCoordinatesNotValid = errors.New("GPS coordinates not valid")
|
||||
)
|
||||
|
||||
// GpsDegrees is a high-level struct representing geographic data.
|
||||
type GpsDegrees struct {
|
||||
// Orientation describes the N/E/S/W direction that this position is
|
||||
// relative to.
|
||||
Orientation byte
|
||||
|
||||
// Degrees is a simple float representing the underlying rational degrees
|
||||
// amount.
|
||||
Degrees float64
|
||||
|
||||
// Minutes is a simple float representing the underlying rational minutes
|
||||
// amount.
|
||||
Minutes float64
|
||||
|
||||
// Seconds is a simple float representing the underlying rational seconds
|
||||
// amount.
|
||||
Seconds float64
|
||||
}
|
||||
|
||||
// NewGpsDegreesFromRationals returns a GpsDegrees struct given the EXIF-encoded
|
||||
// information. The refValue is the N/E/S/W direction that this position is
|
||||
// relative to.
|
||||
func NewGpsDegreesFromRationals(refValue string, rawCoordinate []exifcommon.Rational) (gd GpsDegrees, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if len(rawCoordinate) != 3 {
|
||||
log.Panicf("new GpsDegrees struct requires a raw-coordinate with exactly three rationals")
|
||||
}
|
||||
|
||||
gd = GpsDegrees{
|
||||
Orientation: refValue[0],
|
||||
Degrees: float64(rawCoordinate[0].Numerator) / float64(rawCoordinate[0].Denominator),
|
||||
Minutes: float64(rawCoordinate[1].Numerator) / float64(rawCoordinate[1].Denominator),
|
||||
Seconds: float64(rawCoordinate[2].Numerator) / float64(rawCoordinate[2].Denominator),
|
||||
}
|
||||
|
||||
return gd, nil
|
||||
}
|
||||
|
||||
// String returns a descriptive string.
|
||||
func (d GpsDegrees) String() string {
|
||||
return fmt.Sprintf("Degrees<O=[%s] D=(%g) M=(%g) S=(%g)>", string([]byte{d.Orientation}), d.Degrees, d.Minutes, d.Seconds)
|
||||
}
|
||||
|
||||
// Decimal calculates and returns the simplified float representation of the
|
||||
// component degrees.
|
||||
func (d GpsDegrees) Decimal() float64 {
|
||||
decimal := float64(d.Degrees) + float64(d.Minutes)/60.0 + float64(d.Seconds)/3600.0
|
||||
|
||||
if d.Orientation == 'S' || d.Orientation == 'W' {
|
||||
return -decimal
|
||||
}
|
||||
|
||||
return decimal
|
||||
}
|
||||
|
||||
// Raw returns a slice of Rational structs that can be used to *write* coordinates. In
|
||||
// practice, the denominators are typically (1) in the original EXIF data, and,
|
||||
// that being the case, this will best preserve precision.
|
||||
func (d GpsDegrees) Raw() []exifcommon.Rational {
|
||||
return []exifcommon.Rational{
|
||||
{Numerator: uint32(d.Degrees), Denominator: 1},
|
||||
{Numerator: uint32(d.Minutes), Denominator: 1},
|
||||
{Numerator: uint32(d.Seconds), Denominator: 1},
|
||||
}
|
||||
}
|
||||
|
||||
// GpsInfo encapsulates all of the geographic information in one place.
|
||||
type GpsInfo struct {
|
||||
Latitude, Longitude GpsDegrees
|
||||
Altitude int
|
||||
Timestamp time.Time
|
||||
}
|
||||
|
||||
// String returns a descriptive string.
|
||||
func (gi *GpsInfo) String() string {
|
||||
return fmt.Sprintf("GpsInfo<LAT=(%.05f) LON=(%.05f) ALT=(%d) TIME=[%s]>",
|
||||
gi.Latitude.Decimal(), gi.Longitude.Decimal(), gi.Altitude, gi.Timestamp)
|
||||
}
|
||||
|
||||
// S2CellId returns the cell-ID of the geographic location on the earth.
|
||||
func (gi *GpsInfo) S2CellId() s2.CellID {
|
||||
latitude := gi.Latitude.Decimal()
|
||||
longitude := gi.Longitude.Decimal()
|
||||
|
||||
ll := s2.LatLngFromDegrees(latitude, longitude)
|
||||
cellId := s2.CellIDFromLatLng(ll)
|
||||
|
||||
if cellId.IsValid() == false {
|
||||
panic(ErrGpsCoordinatesNotValid)
|
||||
}
|
||||
|
||||
return cellId
|
||||
}
|
|
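The GpsDegrees helpers above round-trip between the EXIF degrees/minutes/seconds rationals and a single decimal value. A minimal sketch of that conversion, assuming the usual module path for this package (the coordinate values are made up for illustration):

package main

import (
	"fmt"
	"log"

	exif "github.com/dsoprea/go-exif/v2"
	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	// 40 degrees, 26 minutes, 46 seconds, north of the equator (illustrative).
	rawLatitude := []exifcommon.Rational{
		{Numerator: 40, Denominator: 1},
		{Numerator: 26, Denominator: 1},
		{Numerator: 46, Denominator: 1},
	}

	gd, err := exif.NewGpsDegreesFromRationals("N", rawLatitude)
	if err != nil {
		log.Fatal(err)
	}

	// Decimal() folds minutes and seconds into the degrees; "S"/"W" negate it.
	fmt.Println(gd.Decimal()) // ~40.446111

	// Raw() goes the other way, truncating each component to a whole-number
	// rational with a denominator of 1.
	fmt.Println(gd.Raw())
}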
@ -0,0 +1,34 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
// TODO(dustin): This file now exists for backwards-compatibility only.
|
||||
|
||||
// NewIfdMapping returns a new IfdMapping struct.
|
||||
func NewIfdMapping() (ifdMapping *exifcommon.IfdMapping) {
|
||||
return exifcommon.NewIfdMapping()
|
||||
}
|
||||
|
||||
// NewIfdMappingWithStandard returns a new IfdMapping struct preloaded with the
|
||||
// standard IFDs.
|
||||
func NewIfdMappingWithStandard() (ifdMapping *exifcommon.IfdMapping) {
|
||||
return exifcommon.NewIfdMappingWithStandard()
|
||||
}
|
||||
|
||||
// LoadStandardIfds loads the standard IFDs into the mapping.
|
||||
func LoadStandardIfds(im *exifcommon.IfdMapping) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
err = exifcommon.LoadStandardIfds(im)
|
||||
log.PanicIf(err)
|
||||
|
||||
return nil
|
||||
}
|
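Since the three wrappers above only forward to exifcommon, both construction styles below are equivalent; a brief sketch, assuming the usual module path:

package main

import (
	"log"

	exif "github.com/dsoprea/go-exif/v2"
)

func main() {
	// One-step construction with the standard IFDs preloaded.
	im := exif.NewIfdMappingWithStandard()
	_ = im

	// Equivalent two-step construction.
	im2 := exif.NewIfdMapping()
	if err := exif.LoadStandardIfds(im2); err != nil {
		log.Fatal(err)
	}
}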
File diff suppressed because it is too large
|
@ -0,0 +1,531 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
const (
|
||||
// Tag-ID + Tag-Type + Unit-Count + Value/Offset.
|
||||
IfdTagEntrySize = uint32(2 + 2 + 4 + 4)
|
||||
)
|
||||
|
||||
type ByteWriter struct {
|
||||
b *bytes.Buffer
|
||||
byteOrder binary.ByteOrder
|
||||
}
|
||||
|
||||
func NewByteWriter(b *bytes.Buffer, byteOrder binary.ByteOrder) (bw *ByteWriter) {
|
||||
return &ByteWriter{
|
||||
b: b,
|
||||
byteOrder: byteOrder,
|
||||
}
|
||||
}
|
||||
|
||||
func (bw ByteWriter) writeAsBytes(value interface{}) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
err = binary.Write(bw.b, bw.byteOrder, value)
|
||||
log.PanicIf(err)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bw ByteWriter) WriteUint32(value uint32) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
err = bw.writeAsBytes(value)
|
||||
log.PanicIf(err)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bw ByteWriter) WriteUint16(value uint16) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
err = bw.writeAsBytes(value)
|
||||
log.PanicIf(err)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (bw ByteWriter) WriteFourBytes(value []byte) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
len_ := len(value)
|
||||
if len_ != 4 {
|
||||
log.Panicf("value is not four-bytes: (%d)", len_)
|
||||
}
|
||||
|
||||
_, err = bw.b.Write(value)
|
||||
log.PanicIf(err)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ifdDataAllocator keeps track of where the next IFD should be written by
|
||||
// keeping track of where the offsets start, the data that has been added, and
|
||||
// bumping the offset *when* the data is added.
|
||||
type ifdDataAllocator struct {
|
||||
offset uint32
|
||||
b bytes.Buffer
|
||||
}
|
||||
|
||||
func newIfdDataAllocator(ifdDataAddressableOffset uint32) *ifdDataAllocator {
|
||||
return &ifdDataAllocator{
|
||||
offset: ifdDataAddressableOffset,
|
||||
}
|
||||
}
|
||||
|
||||
func (ida *ifdDataAllocator) Allocate(value []byte) (offset uint32, err error) {
|
||||
_, err = ida.b.Write(value)
|
||||
log.PanicIf(err)
|
||||
|
||||
offset = ida.offset
|
||||
ida.offset += uint32(len(value))
|
||||
|
||||
return offset, nil
|
||||
}
|
||||
|
||||
func (ida *ifdDataAllocator) NextOffset() uint32 {
|
||||
return ida.offset
|
||||
}
|
||||
|
||||
func (ida *ifdDataAllocator) Bytes() []byte {
|
||||
return ida.b.Bytes()
|
||||
}
|
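The allocator above only appends to one buffer and advances its offset by the length of each allocation; the returned offset is what the entry's value/offset field will point at. A standalone sketch of the same bookkeeping (the library type itself is unexported, and the starting offset of 8 here is arbitrary):

package main

import (
	"bytes"
	"fmt"
)

// dataAllocator mirrors ifdDataAllocator: every allocation is appended to a
// single buffer and stamped with the offset at which it will land.
type dataAllocator struct {
	offset uint32
	b      bytes.Buffer
}

func (da *dataAllocator) allocate(value []byte) uint32 {
	offset := da.offset
	da.b.Write(value)
	da.offset += uint32(len(value))
	return offset
}

func main() {
	da := &dataAllocator{offset: 8}
	fmt.Println(da.allocate([]byte("ABCDEFGH"))) // 8
	fmt.Println(da.allocate([]byte("12345")))    // 16
	fmt.Println(da.offset)                       // 21
}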
||||
|
||||
// IfdByteEncoder converts an IB to raw bytes (for writing) while also figuring
|
||||
// out all of the allocations and indirection that is required for extended
|
||||
// data.
|
||||
type IfdByteEncoder struct {
|
||||
// journal holds a list of actions taken while encoding.
|
||||
journal [][3]string
|
||||
}
|
||||
|
||||
func NewIfdByteEncoder() (ibe *IfdByteEncoder) {
|
||||
return &IfdByteEncoder{
|
||||
journal: make([][3]string, 0),
|
||||
}
|
||||
}
|
||||
|
||||
func (ibe *IfdByteEncoder) Journal() [][3]string {
|
||||
return ibe.journal
|
||||
}
|
||||
|
||||
func (ibe *IfdByteEncoder) TableSize(entryCount int) uint32 {
|
||||
// Tag-Count + (Entry-Size * Entry-Count) + Next-IFD-Offset.
|
||||
return uint32(2) + (IfdTagEntrySize * uint32(entryCount)) + uint32(4)
|
||||
}
|
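TableSize is the fixed cost of an IFD table: a two-byte entry count, twelve bytes per entry, and a four-byte next-IFD pointer, so ten tags occupy 2 + 12*10 + 4 = 126 bytes before any out-of-line data. A quick check, assuming the usual module path:

package main

import (
	"fmt"

	exif "github.com/dsoprea/go-exif/v2"
)

func main() {
	ibe := exif.NewIfdByteEncoder()

	// 2 (entry count) + 12*10 (entries) + 4 (next-IFD pointer).
	fmt.Println(ibe.TableSize(10)) // 126
}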
||||
|
||||
func (ibe *IfdByteEncoder) pushToJournal(where, direction, format string, args ...interface{}) {
|
||||
event := [3]string{
|
||||
direction,
|
||||
where,
|
||||
fmt.Sprintf(format, args...),
|
||||
}
|
||||
|
||||
ibe.journal = append(ibe.journal, event)
|
||||
}
|
||||
|
||||
// PrintJournal prints a hierarchical representation of the steps taken during
|
||||
// encoding.
|
||||
func (ibe *IfdByteEncoder) PrintJournal() {
|
||||
maxWhereLength := 0
|
||||
for _, event := range ibe.journal {
|
||||
where := event[1]
|
||||
|
||||
len_ := len(where)
|
||||
if len_ > maxWhereLength {
|
||||
maxWhereLength = len_
|
||||
}
|
||||
}
|
||||
|
||||
level := 0
|
||||
for i, event := range ibe.journal {
|
||||
direction := event[0]
|
||||
where := event[1]
|
||||
message := event[2]
|
||||
|
||||
if direction != ">" && direction != "<" && direction != "-" {
|
||||
log.Panicf("journal operation not valid: [%s]", direction)
|
||||
}
|
||||
|
||||
if direction == "<" {
|
||||
if level <= 0 {
|
||||
log.Panicf("journal operations unbalanced (too many closes)")
|
||||
}
|
||||
|
||||
level--
|
||||
}
|
||||
|
||||
indent := strings.Repeat(" ", level)
|
||||
|
||||
fmt.Printf("%3d %s%s %s: %s\n", i, indent, direction, where, message)
|
||||
|
||||
if direction == ">" {
|
||||
level++
|
||||
}
|
||||
}
|
||||
|
||||
if level != 0 {
|
||||
log.Panicf("journal operations unbalanced (too many opens)")
|
||||
}
|
||||
}
|
||||
|
||||
// encodeTagToBytes encodes the given tag to a byte stream. If
|
||||
// `nextIfdOffsetToWrite` is more than (0), recurse into child IFDs
|
||||
// (`nextIfdOffsetToWrite` is required in order for them to know where their
|
||||
// IFD data will be written, in order for them to know the offset of where
|
||||
// their allocated-data block will start, which follows right behind).
|
||||
func (ibe *IfdByteEncoder) encodeTagToBytes(ib *IfdBuilder, bt *BuilderTag, bw *ByteWriter, ida *ifdDataAllocator, nextIfdOffsetToWrite uint32) (childIfdBlock []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// Write tag-ID.
|
||||
err = bw.WriteUint16(bt.tagId)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Works for both values and child IFDs (which have an official size of
|
||||
// LONG).
|
||||
err = bw.WriteUint16(uint16(bt.typeId))
|
||||
log.PanicIf(err)
|
||||
|
||||
// Write unit-count.
|
||||
|
||||
if bt.value.IsBytes() == true {
|
||||
effectiveType := bt.typeId
|
||||
if bt.typeId == exifcommon.TypeUndefined {
|
||||
effectiveType = exifcommon.TypeByte
|
||||
}
|
||||
|
||||
// It's a non-unknown value. Calculate the count of values of
|
||||
// the type that we're writing and the raw bytes for the whole list.
|
||||
|
||||
typeSize := uint32(effectiveType.Size())
|
||||
|
||||
valueBytes := bt.value.Bytes()
|
||||
|
||||
len_ := len(valueBytes)
|
||||
unitCount := uint32(len_) / typeSize
|
||||
|
||||
if _, found := tagsWithoutAlignment[bt.tagId]; found == false {
|
||||
remainder := uint32(len_) % typeSize
|
||||
|
||||
if remainder > 0 {
|
||||
log.Panicf("tag (0x%04x) value of (%d) bytes not evenly divisible by type-size (%d)", bt.tagId, len_, typeSize)
|
||||
}
|
||||
}
|
||||
|
||||
err = bw.WriteUint32(unitCount)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Write four-byte value/offset.
|
||||
|
||||
if len_ > 4 {
|
||||
offset, err := ida.Allocate(valueBytes)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = bw.WriteUint32(offset)
|
||||
log.PanicIf(err)
|
||||
} else {
|
||||
fourBytes := make([]byte, 4)
|
||||
copy(fourBytes, valueBytes)
|
||||
|
||||
err = bw.WriteFourBytes(fourBytes)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
} else {
|
||||
if bt.value.IsIb() == false {
|
||||
log.Panicf("tag value is not a byte-slice but also not a child IB: %v", bt)
|
||||
}
|
||||
|
||||
// Write unit-count (one LONG representing one offset).
|
||||
err = bw.WriteUint32(1)
|
||||
log.PanicIf(err)
|
||||
|
||||
if nextIfdOffsetToWrite > 0 {
|
||||
var err error
|
||||
|
||||
ibe.pushToJournal("encodeTagToBytes", ">", "[%s]->[%s]", ib.IfdIdentity().UnindexedString(), bt.value.Ib().IfdIdentity().UnindexedString())
|
||||
|
||||
// Create the block of IFD data and everything it requires.
|
||||
childIfdBlock, err = ibe.encodeAndAttachIfd(bt.value.Ib(), nextIfdOffsetToWrite)
|
||||
log.PanicIf(err)
|
||||
|
||||
ibe.pushToJournal("encodeTagToBytes", "<", "[%s]->[%s]", bt.value.Ib().IfdIdentity().UnindexedString(), ib.IfdIdentity().UnindexedString())
|
||||
|
||||
// Use the next-IFD offset for it. The IFD will actually get
|
||||
// attached after we return.
|
||||
err = bw.WriteUint32(nextIfdOffsetToWrite)
|
||||
log.PanicIf(err)
|
||||
|
||||
} else {
|
||||
// No child-IFDs are to be allocated. Finish the entry with a NULL
|
||||
// pointer.
|
||||
|
||||
ibe.pushToJournal("encodeTagToBytes", "-", "*Not* descending to child: [%s]", bt.value.Ib().IfdIdentity().UnindexedString())
|
||||
|
||||
err = bw.WriteUint32(0)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
}
|
||||
|
||||
return childIfdBlock, nil
|
||||
}
|
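The branch above encodes the TIFF/EXIF rule that the last four bytes of an entry hold the value itself when it fits and an offset into the data area otherwise. A standalone sketch of just that decision (no library types involved; placeValue is a made-up name):

package main

import "fmt"

// placeValue reports where a tag's encoded value would live: inline in the
// entry's four-byte value/offset field (zero-padded), or allocated in the
// data area at the given next-free offset.
func placeValue(valueBytes []byte, nextFreeOffset uint32) (inline []byte, offset uint32, isInline bool) {
	if len(valueBytes) > 4 {
		// Too large to embed: the entry stores an offset instead.
		return nil, nextFreeOffset, false
	}

	// Small enough: pad to exactly four bytes and store it in the entry.
	fourBytes := make([]byte, 4)
	copy(fourBytes, valueBytes)
	return fourBytes, 0, true
}

func main() {
	fmt.Println(placeValue([]byte{0x01, 0x02}, 100))             // inline, zero-padded
	fmt.Println(placeValue([]byte("a longer ASCII value"), 100)) // allocated at offset 100
}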
||||
|
||||
// encodeIfdToBytes encodes the given IB to a byte-slice. We are given the
|
||||
// offset at which this IFD will be written. This method is called both to
|
||||
// pre-determine how big the table is going to be (so that we can calculate the
|
||||
// address to allocate data at) as well as to write the final table.
|
||||
//
|
||||
// It is necessary to fully realize the table in order to predetermine its size
|
||||
// because it is not enough to know the size of the table: If there are child
|
||||
// IFDs, we will not be able to allocate them without first knowing how much
|
||||
// data we need to allocate for the current IFD.
|
||||
func (ibe *IfdByteEncoder) encodeIfdToBytes(ib *IfdBuilder, ifdAddressableOffset uint32, nextIfdOffsetToWrite uint32, setNextIb bool) (data []byte, tableSize uint32, dataSize uint32, childIfdSizes []uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ibe.pushToJournal("encodeIfdToBytes", ">", "%s", ib)
|
||||
|
||||
tableSize = ibe.TableSize(len(ib.tags))
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
bw := NewByteWriter(b, ib.byteOrder)
|
||||
|
||||
// Write tag count.
|
||||
err = bw.WriteUint16(uint16(len(ib.tags)))
|
||||
log.PanicIf(err)
|
||||
|
||||
ida := newIfdDataAllocator(ifdAddressableOffset)
|
||||
|
||||
childIfdBlocks := make([][]byte, 0)
|
||||
|
||||
// Write raw bytes for each tag entry. Allocate larger data to be referred
|
||||
// to in the follow-up data-block as required. Any "unknown"-byte tags that
|
||||
// we can't parse will not be present here (using AddTagsFromExisting(), at
|
||||
// least).
|
||||
for _, bt := range ib.tags {
|
||||
childIfdBlock, err := ibe.encodeTagToBytes(ib, bt, bw, ida, nextIfdOffsetToWrite)
|
||||
log.PanicIf(err)
|
||||
|
||||
if childIfdBlock != nil {
|
||||
// We aren't allowed to have non-nil child IFDs if we're just
|
||||
// sizing things up.
|
||||
if nextIfdOffsetToWrite == 0 {
|
||||
log.Panicf("no IFD offset provided for child-IFDs; no new child-IFDs permitted")
|
||||
}
|
||||
|
||||
nextIfdOffsetToWrite += uint32(len(childIfdBlock))
|
||||
childIfdBlocks = append(childIfdBlocks, childIfdBlock)
|
||||
}
|
||||
}
|
||||
|
||||
dataBytes := ida.Bytes()
|
||||
dataSize = uint32(len(dataBytes))
|
||||
|
||||
childIfdSizes = make([]uint32, len(childIfdBlocks))
|
||||
childIfdsTotalSize := uint32(0)
|
||||
for i, childIfdBlock := range childIfdBlocks {
|
||||
len_ := uint32(len(childIfdBlock))
|
||||
childIfdSizes[i] = len_
|
||||
childIfdsTotalSize += len_
|
||||
}
|
||||
|
||||
// Set the link from this IFD to the next IFD that will be written in the
|
||||
// next cycle.
|
||||
if setNextIb == true {
|
||||
// Write address of next IFD in chain. This will be the original
|
||||
// allocation offset plus the size of everything we have allocated for
|
||||
// this IFD and its child-IFDs.
|
||||
//
|
||||
// It is critical that this number is stepped properly. We experienced
|
||||
// an issue whereby it first looked like we were duplicating the IFD and
|
||||
// then that we were duplicating the tags in the wrong IFD, and then
|
||||
// finally we determined that the next-IFD offset for the first IFD was
|
||||
// accidentally pointing back to the EXIF IFD, so we were visiting it
|
||||
// twice when visiting through the tags after decoding. It was an
|
||||
// expensive bug to find.
|
||||
|
||||
ibe.pushToJournal("encodeIfdToBytes", "-", "Setting 'next' IFD to (0x%08x).", nextIfdOffsetToWrite)
|
||||
|
||||
err := bw.WriteUint32(nextIfdOffsetToWrite)
|
||||
log.PanicIf(err)
|
||||
} else {
|
||||
err := bw.WriteUint32(0)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
_, err = b.Write(dataBytes)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Append any child IFD blocks after our table and data blocks. These IFDs
|
||||
// were equipped with the appropriate offset information so it's expected
|
||||
// that all offsets referred to by these will be correct.
|
||||
//
|
||||
// Note that child-IFDs are appended after the current IFD and before the
|
||||
// next IFD, as opposed to the root IFDs, which are chained together but
|
||||
// will be interrupted by these child-IFDs (which is expected, per the
|
||||
// standard).
|
||||
|
||||
for _, childIfdBlock := range childIfdBlocks {
|
||||
_, err = b.Write(childIfdBlock)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
ibe.pushToJournal("encodeIfdToBytes", "<", "%s", ib)
|
||||
|
||||
return b.Bytes(), tableSize, dataSize, childIfdSizes, nil
|
||||
}
|
||||
|
||||
// encodeAndAttachIfd is a reentrant function that processes the IFD chain.
|
||||
func (ibe *IfdByteEncoder) encodeAndAttachIfd(ib *IfdBuilder, ifdAddressableOffset uint32) (data []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", ">", "%s", ib)
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
i := 0
|
||||
|
||||
for thisIb := ib; thisIb != nil; thisIb = thisIb.nextIb {
|
||||
|
||||
// Do a dry-run in order to pre-determine its size requirement.
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", ">", "Beginning encoding process: (%d) [%s]", i, thisIb.IfdIdentity().UnindexedString())
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", ">", "Calculating size: (%d) [%s]", i, thisIb.IfdIdentity().UnindexedString())
|
||||
|
||||
_, tableSize, allocatedDataSize, _, err := ibe.encodeIfdToBytes(thisIb, ifdAddressableOffset, 0, false)
|
||||
log.PanicIf(err)
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", "<", "Finished calculating size: (%d) [%s]", i, thisIb.IfdIdentity().UnindexedString())
|
||||
|
||||
ifdAddressableOffset += tableSize
|
||||
nextIfdOffsetToWrite := ifdAddressableOffset + allocatedDataSize
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", ">", "Next IFD will be written at offset (0x%08x)", nextIfdOffsetToWrite)
|
||||
|
||||
// Write our IFD as well as any child-IFDs (now that we know the offset
|
||||
// where new IFDs and their data will be allocated).
|
||||
|
||||
setNextIb := thisIb.nextIb != nil
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", ">", "Encoding starting: (%d) [%s] NEXT-IFD-OFFSET-TO-WRITE=(0x%08x)", i, thisIb.IfdIdentity().UnindexedString(), nextIfdOffsetToWrite)
|
||||
|
||||
tableAndAllocated, effectiveTableSize, effectiveAllocatedDataSize, childIfdSizes, err :=
|
||||
ibe.encodeIfdToBytes(thisIb, ifdAddressableOffset, nextIfdOffsetToWrite, setNextIb)
|
||||
|
||||
log.PanicIf(err)
|
||||
|
||||
if effectiveTableSize != tableSize {
|
||||
log.Panicf("written table size does not match the pre-calculated table size: (%d) != (%d) %s", effectiveTableSize, tableSize, ib)
|
||||
} else if effectiveAllocatedDataSize != allocatedDataSize {
|
||||
log.Panicf("written allocated-data size does not match the pre-calculated allocated-data size: (%d) != (%d) %s", effectiveAllocatedDataSize, allocatedDataSize, ib)
|
||||
}
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", "<", "Encoding done: (%d) [%s]", i, thisIb.IfdIdentity().UnindexedString())
|
||||
|
||||
totalChildIfdSize := uint32(0)
|
||||
for _, childIfdSize := range childIfdSizes {
|
||||
totalChildIfdSize += childIfdSize
|
||||
}
|
||||
|
||||
if len(tableAndAllocated) != int(tableSize+allocatedDataSize+totalChildIfdSize) {
|
||||
log.Panicf("IFD table and data is not a consistent size: (%d) != (%d)", len(tableAndAllocated), tableSize+allocatedDataSize+totalChildIfdSize)
|
||||
}
|
||||
|
||||
// TODO(dustin): We might want to verify the original tableAndAllocated length, too.
|
||||
|
||||
_, err = b.Write(tableAndAllocated)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Advance past what we've allocated, thus far.
|
||||
|
||||
ifdAddressableOffset += allocatedDataSize + totalChildIfdSize
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", "<", "Finishing encoding process: (%d) [%s] [FINAL:] NEXT-IFD-OFFSET-TO-WRITE=(0x%08x)", i, ib.IfdIdentity().UnindexedString(), nextIfdOffsetToWrite)
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
ibe.pushToJournal("encodeAndAttachIfd", "<", "%s", ib)
|
||||
|
||||
return b.Bytes(), nil
|
||||
}
|
||||
|
||||
// EncodeToExifPayload is the base encoding step that transcribes the entire IB
|
||||
// structure to its on-disk layout.
|
||||
func (ibe *IfdByteEncoder) EncodeToExifPayload(ib *IfdBuilder) (data []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
data, err = ibe.encodeAndAttachIfd(ib, ExifDefaultFirstIfdOffset)
|
||||
log.PanicIf(err)
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// EncodeToExif calls EncodeToExifPayload and then packages the result into a
|
||||
// complete EXIF block.
|
||||
func (ibe *IfdByteEncoder) EncodeToExif(ib *IfdBuilder) (data []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
encodedIfds, err := ibe.EncodeToExifPayload(ib)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Wrap the IFD in a formal EXIF block.
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
headerBytes, err := BuildExifHeader(ib.byteOrder, ExifDefaultFirstIfdOffset)
|
||||
log.PanicIf(err)
|
||||
|
||||
_, err = b.Write(headerBytes)
|
||||
log.PanicIf(err)
|
||||
|
||||
_, err = b.Write(encodedIfds)
|
||||
log.PanicIf(err)
|
||||
|
||||
return b.Bytes(), nil
|
||||
}
|
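EncodeToExif is the top of this file's pipeline: it renders the IFD chain via encodeAndAttachIfd and prepends the header from BuildExifHeader. A minimal sketch of driving it, assuming a populated *exif.IfdBuilder is obtained elsewhere (buildExifBlock is a hypothetical helper name):

package exifutil

import (
	exif "github.com/dsoprea/go-exif/v2"
)

// buildExifBlock assumes the caller already has a populated *exif.IfdBuilder,
// which is constructed elsewhere in the package.
func buildExifBlock(ib *exif.IfdBuilder) ([]byte, error) {
	ibe := exif.NewIfdByteEncoder()

	// EncodeToExif prepends the EXIF header and renders the whole IFD chain,
	// including child IFDs and their out-of-line data.
	return ibe.EncodeToExif(ib)
}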
File diff suppressed because it is too large
|
@ -0,0 +1,297 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
exifundefined "github.com/dsoprea/go-exif/v2/undefined"
|
||||
)
|
||||
|
||||
var (
|
||||
iteLogger = log.NewLogger("exif.ifd_tag_entry")
|
||||
)
|
||||
|
||||
// IfdTagEntry refers to a tag in the loaded EXIF block.
|
||||
type IfdTagEntry struct {
|
||||
tagId uint16
|
||||
tagIndex int
|
||||
tagType exifcommon.TagTypePrimitive
|
||||
unitCount uint32
|
||||
valueOffset uint32
|
||||
rawValueOffset []byte
|
||||
|
||||
// childIfdName is the right most atom in the IFD-path. We need this to
|
||||
// construct the fully-qualified IFD-path.
|
||||
childIfdName string
|
||||
|
||||
// childIfdPath is the IFD-path of the child if this tag represents a child
|
||||
// IFD.
|
||||
childIfdPath string
|
||||
|
||||
// childFqIfdPath is the IFD-path of the child if this tag represents a
|
||||
// child IFD. Includes indices.
|
||||
childFqIfdPath string
|
||||
|
||||
// TODO(dustin): !! IB's host the child-IBs directly in the tag, but that's not the case here. Refactor to accommodate it for a consistent experience.
|
||||
|
||||
ifdIdentity *exifcommon.IfdIdentity
|
||||
|
||||
isUnhandledUnknown bool
|
||||
|
||||
addressableData []byte
|
||||
byteOrder binary.ByteOrder
|
||||
|
||||
tagName string
|
||||
}
|
||||
|
||||
func newIfdTagEntry(ii *exifcommon.IfdIdentity, tagId uint16, tagIndex int, tagType exifcommon.TagTypePrimitive, unitCount uint32, valueOffset uint32, rawValueOffset []byte, addressableData []byte, byteOrder binary.ByteOrder) *IfdTagEntry {
|
||||
return &IfdTagEntry{
|
||||
ifdIdentity: ii,
|
||||
tagId: tagId,
|
||||
tagIndex: tagIndex,
|
||||
tagType: tagType,
|
||||
unitCount: unitCount,
|
||||
valueOffset: valueOffset,
|
||||
rawValueOffset: rawValueOffset,
|
||||
addressableData: addressableData,
|
||||
byteOrder: byteOrder,
|
||||
}
|
||||
}
|
||||
|
||||
// String returns a stringified representation of the struct.
|
||||
func (ite *IfdTagEntry) String() string {
|
||||
return fmt.Sprintf("IfdTagEntry<TAG-IFD-PATH=[%s] TAG-ID=(0x%04x) TAG-TYPE=[%s] UNIT-COUNT=(%d)>", ite.ifdIdentity.String(), ite.tagId, ite.tagType.String(), ite.unitCount)
|
||||
}
|
||||
|
||||
// TagName returns the name of the tag. This is determined elsewhere and set after
|
||||
// the parse (since it's not actually stored in the stream). If it's empty, it
|
||||
// is because it is an unknown tag (nonstandard or otherwise unavailable in the
|
||||
// tag-index).
|
||||
func (ite *IfdTagEntry) TagName() string {
|
||||
return ite.tagName
|
||||
}
|
||||
|
||||
// setTagName sets the tag-name. This provides the name for convenience and
|
||||
// efficiency by determining it when most efficient while we're parsing rather
|
||||
// than delegating it to the caller (or, worse, the user).
|
||||
func (ite *IfdTagEntry) setTagName(tagName string) {
|
||||
ite.tagName = tagName
|
||||
}
|
||||
|
||||
// IfdPath returns the fully-qualified path of the IFD that owns this tag.
|
||||
func (ite *IfdTagEntry) IfdPath() string {
|
||||
return ite.ifdIdentity.String()
|
||||
}
|
||||
|
||||
// TagId returns the ID of the tag that we represent. The combination of
|
||||
// (IfdPath(), TagId()) is unique.
|
||||
func (ite *IfdTagEntry) TagId() uint16 {
|
||||
return ite.tagId
|
||||
}
|
||||
|
||||
// IsThumbnailOffset returns true if the tag has the IFD and tag-ID of a
|
||||
// thumbnail offset.
|
||||
func (ite *IfdTagEntry) IsThumbnailOffset() bool {
|
||||
return ite.tagId == ThumbnailOffsetTagId && ite.ifdIdentity.String() == ThumbnailFqIfdPath
|
||||
}
|
||||
|
||||
// IsThumbnailSize returns true if the tag has the IFD and tag-ID of a thumbnail
|
||||
// size.
|
||||
func (ite *IfdTagEntry) IsThumbnailSize() bool {
|
||||
return ite.tagId == ThumbnailSizeTagId && ite.ifdIdentity.String() == ThumbnailFqIfdPath
|
||||
}
|
||||
|
||||
// TagType is the type of value for this tag.
|
||||
func (ite *IfdTagEntry) TagType() exifcommon.TagTypePrimitive {
|
||||
return ite.tagType
|
||||
}
|
||||
|
||||
// updateTagType sets an alternatively interpreted tag-type.
|
||||
func (ite *IfdTagEntry) updateTagType(tagType exifcommon.TagTypePrimitive) {
|
||||
ite.tagType = tagType
|
||||
}
|
||||
|
||||
// UnitCount returns the unit-count of the tag's value.
|
||||
func (ite *IfdTagEntry) UnitCount() uint32 {
|
||||
return ite.unitCount
|
||||
}
|
||||
|
||||
// updateUnitCount sets an alternatively interpreted unit-count.
|
||||
func (ite *IfdTagEntry) updateUnitCount(unitCount uint32) {
|
||||
ite.unitCount = unitCount
|
||||
}
|
||||
|
||||
// getValueOffset is the four-byte offset converted to an integer to point to
|
||||
// the location of its value in the EXIF block. The "get" prefix is obviously
|
||||
// used in order to differentiate the naming of the method from the field.
|
||||
func (ite *IfdTagEntry) getValueOffset() uint32 {
|
||||
return ite.valueOffset
|
||||
}
|
||||
|
||||
// GetRawBytes renders a specific list of bytes from the value in this tag.
|
||||
func (ite *IfdTagEntry) GetRawBytes() (rawBytes []byte, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext := ite.getValueContext()
|
||||
|
||||
if ite.tagType == exifcommon.TypeUndefined {
|
||||
value, err := exifundefined.Decode(valueContext)
|
||||
if err != nil {
|
||||
if err == exifcommon.ErrUnhandledUndefinedTypedTag {
|
||||
ite.setIsUnhandledUnknown(true)
|
||||
return nil, exifundefined.ErrUnparseableValue
|
||||
} else if err == exifundefined.ErrUnparseableValue {
|
||||
return nil, err
|
||||
} else {
|
||||
log.Panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// Encode it back, in order to get the raw bytes. This is the best,
|
||||
// general way to do it with an undefined tag.
|
||||
|
||||
rawBytes, _, err := exifundefined.Encode(value, ite.byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return rawBytes, nil
|
||||
}
|
||||
|
||||
rawBytes, err = valueContext.ReadRawEncoded()
|
||||
log.PanicIf(err)
|
||||
|
||||
return rawBytes, nil
|
||||
}
|
||||
|
||||
// Value returns the specific, parsed, typed value from the tag.
|
||||
func (ite *IfdTagEntry) Value() (value interface{}, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext := ite.getValueContext()
|
||||
|
||||
if ite.tagType == exifcommon.TypeUndefined {
|
||||
var err error
|
||||
|
||||
value, err = exifundefined.Decode(valueContext)
|
||||
if err != nil {
|
||||
if err == exifcommon.ErrUnhandledUndefinedTypedTag || err == exifundefined.ErrUnparseableValue {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
} else {
|
||||
var err error
|
||||
|
||||
value, err = valueContext.Values()
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// Format returns the tag's value as a string.
|
||||
func (ite *IfdTagEntry) Format() (phrase string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
value, err := ite.Value()
|
||||
if err != nil {
|
||||
if err == exifcommon.ErrUnhandledUndefinedTypedTag {
|
||||
return exifundefined.UnparseableUnknownTagValuePlaceholder, nil
|
||||
} else if err == exifundefined.ErrUnparseableValue {
|
||||
return exifundefined.UnparseableHandledTagValuePlaceholder, nil
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
phrase, err = exifcommon.FormatFromType(value, false)
|
||||
log.PanicIf(err)
|
||||
|
||||
return phrase, nil
|
||||
}
|
||||
|
||||
// FormatFirst returns the same as Format() but only the first item.
|
||||
func (ite *IfdTagEntry) FormatFirst() (phrase string, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): We should add a convenience type "timestamp", to simplify translating to and from the physical ASCII and provide validation.
|
||||
|
||||
value, err := ite.Value()
|
||||
if err != nil {
|
||||
if err == exifcommon.ErrUnhandledUndefinedTypedTag {
|
||||
return exifundefined.UnparseableUnknownTagValuePlaceholder, nil
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
phrase, err = exifcommon.FormatFromType(value, true)
|
||||
log.PanicIf(err)
|
||||
|
||||
return phrase, nil
|
||||
}
|
||||
|
||||
func (ite *IfdTagEntry) setIsUnhandledUnknown(isUnhandledUnknown bool) {
|
||||
ite.isUnhandledUnknown = isUnhandledUnknown
|
||||
}
|
||||
|
||||
// SetChildIfd sets child-IFD information (if we represent a child IFD).
|
||||
func (ite *IfdTagEntry) SetChildIfd(ii *exifcommon.IfdIdentity) {
|
||||
ite.childFqIfdPath = ii.String()
|
||||
ite.childIfdPath = ii.UnindexedString()
|
||||
ite.childIfdName = ii.Name()
|
||||
}
|
||||
|
||||
// ChildIfdName returns the name of the child IFD.
|
||||
func (ite *IfdTagEntry) ChildIfdName() string {
|
||||
return ite.childIfdName
|
||||
}
|
||||
|
||||
// ChildIfdPath returns the path of the child IFD.
|
||||
func (ite *IfdTagEntry) ChildIfdPath() string {
|
||||
return ite.childIfdPath
|
||||
}
|
||||
|
||||
// ChildFqIfdPath returns the complete path of the child IFD along with the
|
||||
// numeric suffixes differentiating sibling occurrences of the same type. "0"
|
||||
// indices are omitted.
|
||||
func (ite *IfdTagEntry) ChildFqIfdPath() string {
|
||||
return ite.childFqIfdPath
|
||||
}
|
||||
|
||||
// IfdIdentity returns the IfdIdentity associated with this tag.
|
||||
func (ite *IfdTagEntry) IfdIdentity() *exifcommon.IfdIdentity {
|
||||
return ite.ifdIdentity
|
||||
}
|
||||
|
||||
func (ite *IfdTagEntry) getValueContext() *exifcommon.ValueContext {
|
||||
return exifcommon.NewValueContext(
|
||||
ite.ifdIdentity.String(),
|
||||
ite.tagId,
|
||||
ite.unitCount,
|
||||
ite.valueOffset,
|
||||
ite.rawValueOffset,
|
||||
ite.addressableData,
|
||||
ite.tagType,
|
||||
ite.byteOrder)
|
||||
}
|
|
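IfdTagEntry is read-mostly: a tag is identified by (IfdPath(), TagId()) and rendered with Format(), which substitutes the placeholder strings for undefined-type values it cannot decode. A small sketch of dumping a slice of entries, assuming they came from this package's parsing entry points (dumpEntries is a hypothetical helper name):

package exifutil

import (
	"fmt"

	exif "github.com/dsoprea/go-exif/v2"
)

// dumpEntries prints one line per tag using only IfdTagEntry's read accessors.
func dumpEntries(entries []*exif.IfdTagEntry) {
	for _, ite := range entries {
		// Format() already falls back to a placeholder for unparseable
		// undefined-type values, so any error here is unexpected.
		phrase, err := ite.Format()
		if err != nil {
			phrase = "<error>"
		}

		fmt.Printf("%s 0x%04x [%s] = %s\n", ite.IfdPath(), ite.TagId(), ite.TagName(), phrase)
	}
}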
@ -0,0 +1,8 @@
|
|||
// Package exif parses raw EXIF information given a block of raw EXIF data. It
|
||||
// can also construct new EXIF information, and provides tools for doing so.
|
||||
// This package is not involved with the parsing of particular file-formats.
|
||||
//
|
||||
// The EXIF data must first be extracted and then provided to us. Conversely,
|
||||
// when constructing new EXIF data, the caller is responsible for packaging
|
||||
// this in whichever format they require.
|
||||
package exif
|
|
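As the package comment says, this package consumes a raw EXIF block rather than a whole image file. A minimal sketch of that hand-off, assuming the package's SearchAndExtractExif helper (defined outside this diff) and a placeholder filename:

package main

import (
	"fmt"
	"io/ioutil"
	"log"

	exif "github.com/dsoprea/go-exif/v2"
)

func main() {
	// Read the whole image; the EXIF block is located somewhere inside it.
	data, err := ioutil.ReadFile("photo.jpg")
	if err != nil {
		log.Fatal(err)
	}

	// SearchAndExtractExif scans for the EXIF header and returns the raw
	// block that the rest of the package operates on.
	rawExif, err := exif.SearchAndExtractExif(data)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("found %d bytes of EXIF data\n", len(rawExif))
}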
@ -0,0 +1,411 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
"gopkg.in/yaml.v2"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
const (
|
||||
// IFD1
|
||||
|
||||
// ThumbnailFqIfdPath is the fully-qualified IFD path that the thumbnail
|
||||
// must be found in.
|
||||
ThumbnailFqIfdPath = "IFD1"
|
||||
|
||||
// ThumbnailOffsetTagId is the tag-ID of the thumbnail offset.
|
||||
ThumbnailOffsetTagId = 0x0201
|
||||
|
||||
// ThumbnailSizeTagId is the tag-ID of the thumbnail size.
|
||||
ThumbnailSizeTagId = 0x0202
|
||||
)
|
||||
|
||||
const (
|
||||
// GPS
|
||||
|
||||
// TagGpsVersionId is the ID of the GPS version tag.
|
||||
TagGpsVersionId = 0x0000
|
||||
|
||||
// TagLatitudeId is the ID of the GPS latitude tag.
|
||||
TagLatitudeId = 0x0002
|
||||
|
||||
// TagLatitudeRefId is the ID of the GPS latitude orientation tag.
|
||||
TagLatitudeRefId = 0x0001
|
||||
|
||||
// TagLongitudeId is the ID of the GPS longitude tag.
|
||||
TagLongitudeId = 0x0004
|
||||
|
||||
// TagLongitudeRefId is the ID of the GPS longitude-orientation tag.
|
||||
TagLongitudeRefId = 0x0003
|
||||
|
||||
// TagTimestampId is the ID of the GPS time tag.
|
||||
TagTimestampId = 0x0007
|
||||
|
||||
// TagDatestampId is the ID of the GPS date tag.
|
||||
TagDatestampId = 0x001d
|
||||
|
||||
// TagAltitudeId is the ID of the GPS altitude tag.
|
||||
TagAltitudeId = 0x0006
|
||||
|
||||
// TagAltitudeRefId is the ID of the GPS altitude-orientation tag.
|
||||
TagAltitudeRefId = 0x0005
|
||||
)
|
||||
|
||||
var (
|
||||
// tagsWithoutAlignment is a tag-lookup for tags whose value size won't
|
||||
// necessarily be a multiple of its tag-type.
|
||||
tagsWithoutAlignment = map[uint16]struct{}{
|
||||
// The thumbnail offset is stored as a long, but its data is a binary
|
||||
// blob (not a slice of longs).
|
||||
ThumbnailOffsetTagId: {},
|
||||
}
|
||||
)
|
||||
|
||||
var (
|
||||
tagsLogger = log.NewLogger("exif.tags")
|
||||
)
|
||||
|
||||
// File structures.
|
||||
|
||||
type encodedTag struct {
|
||||
// id is signed, here, because YAML doesn't have enough information to
|
||||
// support unsigned.
|
||||
Id int `yaml:"id"`
|
||||
Name string `yaml:"name"`
|
||||
TypeName string `yaml:"type_name"`
|
||||
TypeNames []string `yaml:"type_names"`
|
||||
}
|
||||
|
||||
// Indexing structures.
|
||||
|
||||
// IndexedTag describes one index lookup result.
|
||||
type IndexedTag struct {
|
||||
// Id is the tag-ID.
|
||||
Id uint16
|
||||
|
||||
// Name is the tag name.
|
||||
Name string
|
||||
|
||||
// IfdPath is the proper IFD path of this tag. This is not fully-qualified.
|
||||
IfdPath string
|
||||
|
||||
// SupportedTypes is an unsorted list of allowed tag-types.
|
||||
SupportedTypes []exifcommon.TagTypePrimitive
|
||||
}
|
||||
|
||||
// String returns a descriptive string.
|
||||
func (it *IndexedTag) String() string {
|
||||
return fmt.Sprintf("TAG<ID=(0x%04x) NAME=[%s] IFD=[%s]>", it.Id, it.Name, it.IfdPath)
|
||||
}
|
||||
|
||||
// IsName returns true if this tag matches the given tag name.
|
||||
func (it *IndexedTag) IsName(ifdPath, name string) bool {
|
||||
return it.Name == name && it.IfdPath == ifdPath
|
||||
}
|
||||
|
||||
// Is returns true if this tag matches the given tag ID.
|
||||
func (it *IndexedTag) Is(ifdPath string, id uint16) bool {
|
||||
return it.Id == id && it.IfdPath == ifdPath
|
||||
}
|
||||
|
||||
// GetEncodingType returns the largest type that this tag's value can occupy.
|
||||
func (it *IndexedTag) GetEncodingType(value interface{}) exifcommon.TagTypePrimitive {
|
||||
// For convenience, we handle encoding a `time.Time` directly.
|
||||
if IsTime(value) == true {
|
||||
// Timestamps are encoded as ASCII.
|
||||
value = ""
|
||||
}
|
||||
|
||||
if len(it.SupportedTypes) == 0 {
|
||||
log.Panicf("IndexedTag [%s] (%d) has no supported types.", it.IfdPath, it.Id)
|
||||
} else if len(it.SupportedTypes) == 1 {
|
||||
return it.SupportedTypes[0]
|
||||
}
|
||||
|
||||
supportsLong := false
|
||||
supportsShort := false
|
||||
supportsRational := false
|
||||
supportsSignedRational := false
|
||||
for _, supportedType := range it.SupportedTypes {
|
||||
if supportedType == exifcommon.TypeLong {
|
||||
supportsLong = true
|
||||
} else if supportedType == exifcommon.TypeShort {
|
||||
supportsShort = true
|
||||
} else if supportedType == exifcommon.TypeRational {
|
||||
supportsRational = true
|
||||
} else if supportedType == exifcommon.TypeSignedRational {
|
||||
supportsSignedRational = true
|
||||
}
|
||||
}
|
||||
|
||||
// We specifically check for the cases that we know to expect.
|
||||
|
||||
if supportsLong == true && supportsShort == true {
|
||||
return exifcommon.TypeLong
|
||||
} else if supportsRational == true && supportsSignedRational == true {
|
||||
if value == nil {
|
||||
log.Panicf("GetEncodingType: require value to be given")
|
||||
}
|
||||
|
||||
if _, ok := value.(exifcommon.SignedRational); ok == true {
|
||||
return exifcommon.TypeSignedRational
|
||||
}
|
||||
|
||||
return exifcommon.TypeRational
|
||||
}
|
||||
|
||||
log.Panicf("WidestSupportedType() case is not handled for tag [%s] (0x%04x): %v", it.IfdPath, it.Id, it.SupportedTypes)
|
||||
return 0
|
||||
}
|
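GetEncodingType narrows a multi-type tag to a single on-disk type: LONG wins over SHORT, and a RATIONAL/SRATIONAL pair is decided by the concrete value. A small sketch exercising that with a hand-built IndexedTag (the tag ID and name are made up, and the SignedRational field names are assumed to mirror Rational):

package main

import (
	"fmt"

	exif "github.com/dsoprea/go-exif/v2"
	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	// A hand-built tag that allows both rational encodings (illustrative only).
	it := &exif.IndexedTag{
		Id:      0x1234,
		Name:    "ExampleTag",
		IfdPath: "IFD/Exif",
		SupportedTypes: []exifcommon.TagTypePrimitive{
			exifcommon.TypeRational,
			exifcommon.TypeSignedRational,
		},
	}

	// The concrete value decides: a SignedRational selects SRATIONAL, anything
	// else falls back to RATIONAL. (Field names assumed to mirror Rational.)
	signed := exifcommon.SignedRational{Numerator: -1, Denominator: 2}
	fmt.Println(it.GetEncodingType(signed))

	unsigned := exifcommon.Rational{Numerator: 1, Denominator: 2}
	fmt.Println(it.GetEncodingType(unsigned))
}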
||||
|
||||
// DoesSupportType returns true if this tag can be found/decoded with this type.
|
||||
func (it *IndexedTag) DoesSupportType(tagType exifcommon.TagTypePrimitive) bool {
|
||||
// This is always a very small collection. So, we keep it unsorted.
|
||||
for _, thisTagType := range it.SupportedTypes {
|
||||
if thisTagType == tagType {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// TagIndex is a tag-lookup facility.
|
||||
type TagIndex struct {
|
||||
tagsByIfd map[string]map[uint16]*IndexedTag
|
||||
tagsByIfdR map[string]map[string]*IndexedTag
|
||||
}
|
||||
|
||||
// NewTagIndex returns a new TagIndex struct.
|
||||
func NewTagIndex() *TagIndex {
|
||||
ti := new(TagIndex)
|
||||
|
||||
ti.tagsByIfd = make(map[string]map[uint16]*IndexedTag)
|
||||
ti.tagsByIfdR = make(map[string]map[string]*IndexedTag)
|
||||
|
||||
return ti
|
||||
}
|
||||
|
||||
// Add registers a new tag to be recognized during the parse.
|
||||
func (ti *TagIndex) Add(it *IndexedTag) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// Store by ID.
|
||||
|
||||
family, found := ti.tagsByIfd[it.IfdPath]
|
||||
if found == false {
|
||||
family = make(map[uint16]*IndexedTag)
|
||||
ti.tagsByIfd[it.IfdPath] = family
|
||||
}
|
||||
|
||||
if _, found := family[it.Id]; found == true {
|
||||
log.Panicf("tag-ID defined more than once for IFD [%s]: (%02x)", it.IfdPath, it.Id)
|
||||
}
|
||||
|
||||
family[it.Id] = it
|
||||
|
||||
// Store by name.
|
||||
|
||||
familyR, found := ti.tagsByIfdR[it.IfdPath]
|
||||
if found == false {
|
||||
familyR = make(map[string]*IndexedTag)
|
||||
ti.tagsByIfdR[it.IfdPath] = familyR
|
||||
}
|
||||
|
||||
if _, found := familyR[it.Name]; found == true {
|
||||
log.Panicf("tag-name defined more than once for IFD [%s]: (%s)", it.IfdPath, it.Name)
|
||||
}
|
||||
|
||||
familyR[it.Name] = it
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get returns information about the non-IFD tag given a tag ID. The lookup
|
||||
// uses the IFD identity's unindexed (not fully-qualified) path.
|
||||
func (ti *TagIndex) Get(ii *exifcommon.IfdIdentity, id uint16) (it *IndexedTag, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if len(ti.tagsByIfd) == 0 {
|
||||
err := LoadStandardTags(ti)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
ifdPath := ii.UnindexedString()
|
||||
|
||||
family, found := ti.tagsByIfd[ifdPath]
|
||||
if found == false {
|
||||
return nil, ErrTagNotFound
|
||||
}
|
||||
|
||||
it, found = family[id]
|
||||
if found == false {
|
||||
return nil, ErrTagNotFound
|
||||
}
|
||||
|
||||
return it, nil
|
||||
}
|
||||
|
||||
var (
|
||||
// tagGuessDefaultIfdIdentities describes which IFDs we'll look for a given
|
||||
// tag-ID in, if it's not found where it's supposed to be. We suppose that
|
||||
// Exif-IFD tags might be found in IFD0 or IFD1, or IFD0/IFD1 tags might be
|
||||
// found in the Exif IFD. This is the only thing we've seen so far. So, this
|
||||
// is the limit of our guessing.
|
||||
tagGuessDefaultIfdIdentities = []*exifcommon.IfdIdentity{
|
||||
exifcommon.IfdExifStandardIfdIdentity,
|
||||
exifcommon.IfdStandardIfdIdentity,
|
||||
}
|
||||
)
|
||||
|
||||
// FindFirst looks for the given tag-ID in each of the given IFDs in the given
|
||||
// order. If `ifdIdentities` is `nil` then use a default search order. This defies
|
||||
// the standard, which requires each tag to exist in certain IFDs. This is a
|
||||
// contingency to make recommendations for malformed data.
|
||||
//
|
||||
// Things *can* end badly here, in that the same tag-ID in different IFDs might
|
||||
// describe different data and different data-types, and our decode might then
|
||||
// produce binary and non-printable data.
|
||||
func (ti *TagIndex) FindFirst(id uint16, typeId exifcommon.TagTypePrimitive, ifdIdentities []*exifcommon.IfdIdentity) (it *IndexedTag, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if ifdIdentities == nil {
|
||||
ifdIdentities = tagGuessDefaultIfdIdentities
|
||||
}
|
||||
|
||||
for _, ii := range ifdIdentities {
|
||||
it, err := ti.Get(ii, id)
|
||||
if err != nil {
|
||||
if err == ErrTagNotFound {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
// Even though the tag might be mislocated, the type should still be the
|
||||
// same. Check this so we don't accidentally end up on a completely
|
||||
// irrelevant tag with a totally different data type. This attempts to
|
||||
// mitigate producing garbage.
|
||||
for _, supportedType := range it.SupportedTypes {
|
||||
if supportedType == typeId {
|
||||
return it, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil, ErrTagNotFound
|
||||
}
|
||||
|
||||
// GetWithName returns information about the non-IFD tag given a tag name.
|
||||
func (ti *TagIndex) GetWithName(ii *exifcommon.IfdIdentity, name string) (it *IndexedTag, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
if len(ti.tagsByIfdR) == 0 {
|
||||
err := LoadStandardTags(ti)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
ifdPath := ii.UnindexedString()
|
||||
|
||||
it, found := ti.tagsByIfdR[ifdPath][name]
|
||||
if found != true {
|
||||
log.Panic(ErrTagNotFound)
|
||||
}
|
||||
|
||||
return it, nil
|
||||
}
|
||||
|
||||
// LoadStandardTags registers the tags that all devices/applications should
|
||||
// support.
|
||||
func LoadStandardTags(ti *TagIndex) (err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// Read static data.
|
||||
|
||||
encodedIfds := make(map[string][]encodedTag)
|
||||
|
||||
err = yaml.Unmarshal([]byte(tagsYaml), encodedIfds)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Load structure.
|
||||
|
||||
count := 0
|
||||
for ifdPath, tags := range encodedIfds {
|
||||
for _, tagInfo := range tags {
|
||||
tagId := uint16(tagInfo.Id)
|
||||
tagName := tagInfo.Name
|
||||
tagTypeName := tagInfo.TypeName
|
||||
tagTypeNames := tagInfo.TypeNames
|
||||
|
||||
if tagTypeNames == nil {
|
||||
if tagTypeName == "" {
|
||||
log.Panicf("no tag-types were given when registering standard tag [%s] (0x%04x) [%s]", ifdPath, tagId, tagName)
|
||||
}
|
||||
|
||||
tagTypeNames = []string{
|
||||
tagTypeName,
|
||||
}
|
||||
} else if tagTypeName != "" {
|
||||
log.Panicf("both 'type_names' and 'type_name' were given when registering standard tag [%s] (0x%04x) [%s]", ifdPath, tagId, tagName)
|
||||
}
|
||||
|
||||
tagTypes := make([]exifcommon.TagTypePrimitive, 0)
|
||||
for _, tagTypeName := range tagTypeNames {
|
||||
|
||||
// TODO(dustin): Discard unsupported types. This helps us with non-standard types that have actually been found in real data, that we ignore for right now. e.g. SSHORT, FLOAT, DOUBLE
|
||||
tagTypeId, found := exifcommon.GetTypeByName(tagTypeName)
|
||||
if found == false {
|
||||
tagsLogger.Warningf(nil, "Type [%s] for tag [%s] being loaded is not valid and is being ignored.", tagTypeName, tagName)
|
||||
continue
|
||||
}
|
||||
|
||||
tagTypes = append(tagTypes, tagTypeId)
|
||||
}
|
||||
|
||||
if len(tagTypes) == 0 {
|
||||
tagsLogger.Warningf(nil, "Tag [%s] (0x%04x) [%s] being loaded does not have any supported types and will not be registered.", ifdPath, tagId, tagName)
|
||||
continue
|
||||
}
|
||||
|
||||
it := &IndexedTag{
|
||||
IfdPath: ifdPath,
|
||||
Id: tagId,
|
||||
Name: tagName,
|
||||
SupportedTypes: tagTypes,
|
||||
}
|
||||
|
||||
err = ti.Add(it)
|
||||
log.PanicIf(err)
|
||||
|
||||
count++
|
||||
}
|
||||
}
|
||||
|
||||
tagsLogger.Debugf(nil, "(%d) tags loaded.", count)
|
||||
|
||||
return nil
|
||||
}
|
|
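TagIndex loads the standard table lazily, so Get and GetWithName work on a fresh index. A short sketch looking up the Make tag (0x010f) in the root IFD, assuming the usual module path:

package main

import (
	"fmt"
	"log"

	exif "github.com/dsoprea/go-exif/v2"
	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

func main() {
	ti := exif.NewTagIndex()

	// The standard tags are loaded automatically on the first lookup.
	it, err := ti.Get(exifcommon.IfdStandardIfdIdentity, 0x010f)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(it) // TAG<ID=(0x010f) NAME=[Make] IFD=[IFD]>
}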
@ -0,0 +1,929 @@
|
|||
package exif
|
||||
|
||||
var (
|
||||
// From assets/tags.yaml . Needs to be here so it's embedded in the binary.
|
||||
tagsYaml = `
|
||||
# Notes:
|
||||
#
|
||||
# This file was produced from http://www.exiv2.org/tags.html, using the included
|
||||
# tool, though that document appears to have some duplicates when all IDs are
|
||||
# supposed to be unique (EXIF information only has IDs, not IFDs; IFDs are
|
||||
# determined by our pre-existing knowledge of those tags).
|
||||
#
|
||||
# The webpage that we've produced this file from appears to indicate that
|
||||
# ImageWidth is represented by both 0x0100 and 0x0001 depending on whether the
|
||||
# encoding is RGB or YCbCr.
|
||||
IFD/Exif:
|
||||
- id: 0x829a
|
||||
name: ExposureTime
|
||||
type_name: RATIONAL
|
||||
- id: 0x829d
|
||||
name: FNumber
|
||||
type_name: RATIONAL
|
||||
- id: 0x8822
|
||||
name: ExposureProgram
|
||||
type_name: SHORT
|
||||
- id: 0x8824
|
||||
name: SpectralSensitivity
|
||||
type_name: ASCII
|
||||
- id: 0x8827
|
||||
name: ISOSpeedRatings
|
||||
type_name: SHORT
|
||||
- id: 0x8828
|
||||
name: OECF
|
||||
type_name: UNDEFINED
|
||||
- id: 0x8830
|
||||
name: SensitivityType
|
||||
type_name: SHORT
|
||||
- id: 0x8831
|
||||
name: StandardOutputSensitivity
|
||||
type_name: LONG
|
||||
- id: 0x8832
|
||||
name: RecommendedExposureIndex
|
||||
type_name: LONG
|
||||
- id: 0x8833
|
||||
name: ISOSpeed
|
||||
type_name: LONG
|
||||
- id: 0x8834
|
||||
name: ISOSpeedLatitudeyyy
|
||||
type_name: LONG
|
||||
- id: 0x8835
|
||||
name: ISOSpeedLatitudezzz
|
||||
type_name: LONG
|
||||
- id: 0x9000
|
||||
name: ExifVersion
|
||||
type_name: UNDEFINED
|
||||
- id: 0x9003
|
||||
name: DateTimeOriginal
|
||||
type_name: ASCII
|
||||
- id: 0x9004
|
||||
name: DateTimeDigitized
|
||||
type_name: ASCII
|
||||
- id: 0x9101
|
||||
name: ComponentsConfiguration
|
||||
type_name: UNDEFINED
|
||||
- id: 0x9102
|
||||
name: CompressedBitsPerPixel
|
||||
type_name: RATIONAL
|
||||
- id: 0x9201
|
||||
name: ShutterSpeedValue
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9202
|
||||
name: ApertureValue
|
||||
type_name: RATIONAL
|
||||
- id: 0x9203
|
||||
name: BrightnessValue
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9204
|
||||
name: ExposureBiasValue
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9205
|
||||
name: MaxApertureValue
|
||||
type_name: RATIONAL
|
||||
- id: 0x9206
|
||||
name: SubjectDistance
|
||||
type_name: RATIONAL
|
||||
- id: 0x9207
|
||||
name: MeteringMode
|
||||
type_name: SHORT
|
||||
- id: 0x9208
|
||||
name: LightSource
|
||||
type_name: SHORT
|
||||
- id: 0x9209
|
||||
name: Flash
|
||||
type_name: SHORT
|
||||
- id: 0x920a
|
||||
name: FocalLength
|
||||
type_name: RATIONAL
|
||||
- id: 0x9214
|
||||
name: SubjectArea
|
||||
type_name: SHORT
|
||||
- id: 0x927c
|
||||
name: MakerNote
|
||||
type_name: UNDEFINED
|
||||
- id: 0x9286
|
||||
name: UserComment
|
||||
type_name: UNDEFINED
|
||||
- id: 0x9290
|
||||
name: SubSecTime
|
||||
type_name: ASCII
|
||||
- id: 0x9291
|
||||
name: SubSecTimeOriginal
|
||||
type_name: ASCII
|
||||
- id: 0x9292
|
||||
name: SubSecTimeDigitized
|
||||
type_name: ASCII
|
||||
- id: 0xa000
|
||||
name: FlashpixVersion
|
||||
type_name: UNDEFINED
|
||||
- id: 0xa001
|
||||
name: ColorSpace
|
||||
type_name: SHORT
|
||||
- id: 0xa002
|
||||
name: PixelXDimension
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0xa003
|
||||
name: PixelYDimension
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0xa004
|
||||
name: RelatedSoundFile
|
||||
type_name: ASCII
|
||||
- id: 0xa005
|
||||
name: InteroperabilityTag
|
||||
type_name: LONG
|
||||
- id: 0xa20b
|
||||
name: FlashEnergy
|
||||
type_name: RATIONAL
|
||||
- id: 0xa20c
|
||||
name: SpatialFrequencyResponse
|
||||
type_name: UNDEFINED
|
||||
- id: 0xa20e
|
||||
name: FocalPlaneXResolution
|
||||
type_name: RATIONAL
|
||||
- id: 0xa20f
|
||||
name: FocalPlaneYResolution
|
||||
type_name: RATIONAL
|
||||
- id: 0xa210
|
||||
name: FocalPlaneResolutionUnit
|
||||
type_name: SHORT
|
||||
- id: 0xa214
|
||||
name: SubjectLocation
|
||||
type_name: SHORT
|
||||
- id: 0xa215
|
||||
name: ExposureIndex
|
||||
type_name: RATIONAL
|
||||
- id: 0xa217
|
||||
name: SensingMethod
|
||||
type_name: SHORT
|
||||
- id: 0xa300
|
||||
name: FileSource
|
||||
type_name: UNDEFINED
|
||||
- id: 0xa301
|
||||
name: SceneType
|
||||
type_name: UNDEFINED
|
||||
- id: 0xa302
|
||||
name: CFAPattern
|
||||
type_name: UNDEFINED
|
||||
- id: 0xa401
|
||||
name: CustomRendered
|
||||
type_name: SHORT
|
||||
- id: 0xa402
|
||||
name: ExposureMode
|
||||
type_name: SHORT
|
||||
- id: 0xa403
|
||||
name: WhiteBalance
|
||||
type_name: SHORT
|
||||
- id: 0xa404
|
||||
name: DigitalZoomRatio
|
||||
type_name: RATIONAL
|
||||
- id: 0xa405
|
||||
name: FocalLengthIn35mmFilm
|
||||
type_name: SHORT
|
||||
- id: 0xa406
|
||||
name: SceneCaptureType
|
||||
type_name: SHORT
|
||||
- id: 0xa407
|
||||
name: GainControl
|
||||
type_name: SHORT
|
||||
- id: 0xa408
|
||||
name: Contrast
|
||||
type_name: SHORT
|
||||
- id: 0xa409
|
||||
name: Saturation
|
||||
type_name: SHORT
|
||||
- id: 0xa40a
|
||||
name: Sharpness
|
||||
type_name: SHORT
|
||||
- id: 0xa40b
|
||||
name: DeviceSettingDescription
|
||||
type_name: UNDEFINED
|
||||
- id: 0xa40c
|
||||
name: SubjectDistanceRange
|
||||
type_name: SHORT
|
||||
- id: 0xa420
|
||||
name: ImageUniqueID
|
||||
type_name: ASCII
|
||||
- id: 0xa430
|
||||
name: CameraOwnerName
|
||||
type_name: ASCII
|
||||
- id: 0xa431
|
||||
name: BodySerialNumber
|
||||
type_name: ASCII
|
||||
- id: 0xa432
|
||||
name: LensSpecification
|
||||
type_name: RATIONAL
|
||||
- id: 0xa433
|
||||
name: LensMake
|
||||
type_name: ASCII
|
||||
- id: 0xa434
|
||||
name: LensModel
|
||||
type_name: ASCII
|
||||
- id: 0xa435
|
||||
name: LensSerialNumber
|
||||
type_name: ASCII
|
||||
IFD/GPSInfo:
|
||||
- id: 0x0000
|
||||
name: GPSVersionID
|
||||
type_name: BYTE
|
||||
- id: 0x0001
|
||||
name: GPSLatitudeRef
|
||||
type_name: ASCII
|
||||
- id: 0x0002
|
||||
name: GPSLatitude
|
||||
type_name: RATIONAL
|
||||
- id: 0x0003
|
||||
name: GPSLongitudeRef
|
||||
type_name: ASCII
|
||||
- id: 0x0004
|
||||
name: GPSLongitude
|
||||
type_name: RATIONAL
|
||||
- id: 0x0005
|
||||
name: GPSAltitudeRef
|
||||
type_name: BYTE
|
||||
- id: 0x0006
|
||||
name: GPSAltitude
|
||||
type_name: RATIONAL
|
||||
- id: 0x0007
|
||||
name: GPSTimeStamp
|
||||
type_name: RATIONAL
|
||||
- id: 0x0008
|
||||
name: GPSSatellites
|
||||
type_name: ASCII
|
||||
- id: 0x0009
|
||||
name: GPSStatus
|
||||
type_name: ASCII
|
||||
- id: 0x000a
|
||||
name: GPSMeasureMode
|
||||
type_name: ASCII
|
||||
- id: 0x000b
|
||||
name: GPSDOP
|
||||
type_name: RATIONAL
|
||||
- id: 0x000c
|
||||
name: GPSSpeedRef
|
||||
type_name: ASCII
|
||||
- id: 0x000d
|
||||
name: GPSSpeed
|
||||
type_name: RATIONAL
|
||||
- id: 0x000e
|
||||
name: GPSTrackRef
|
||||
type_name: ASCII
|
||||
- id: 0x000f
|
||||
name: GPSTrack
|
||||
type_name: RATIONAL
|
||||
- id: 0x0010
|
||||
name: GPSImgDirectionRef
|
||||
type_name: ASCII
|
||||
- id: 0x0011
|
||||
name: GPSImgDirection
|
||||
type_name: RATIONAL
|
||||
- id: 0x0012
|
||||
name: GPSMapDatum
|
||||
type_name: ASCII
|
||||
- id: 0x0013
|
||||
name: GPSDestLatitudeRef
|
||||
type_name: ASCII
|
||||
- id: 0x0014
|
||||
name: GPSDestLatitude
|
||||
type_name: RATIONAL
|
||||
- id: 0x0015
|
||||
name: GPSDestLongitudeRef
|
||||
type_name: ASCII
|
||||
- id: 0x0016
|
||||
name: GPSDestLongitude
|
||||
type_name: RATIONAL
|
||||
- id: 0x0017
|
||||
name: GPSDestBearingRef
|
||||
type_name: ASCII
|
||||
- id: 0x0018
|
||||
name: GPSDestBearing
|
||||
type_name: RATIONAL
|
||||
- id: 0x0019
|
||||
name: GPSDestDistanceRef
|
||||
type_name: ASCII
|
||||
- id: 0x001a
|
||||
name: GPSDestDistance
|
||||
type_name: RATIONAL
|
||||
- id: 0x001b
|
||||
name: GPSProcessingMethod
|
||||
type_name: UNDEFINED
|
||||
- id: 0x001c
|
||||
name: GPSAreaInformation
|
||||
type_name: UNDEFINED
|
||||
- id: 0x001d
|
||||
name: GPSDateStamp
|
||||
type_name: ASCII
|
||||
- id: 0x001e
|
||||
name: GPSDifferential
|
||||
type_name: SHORT
|
||||
IFD:
|
||||
- id: 0x000b
|
||||
name: ProcessingSoftware
|
||||
type_name: ASCII
|
||||
- id: 0x00fe
|
||||
name: NewSubfileType
|
||||
type_name: LONG
|
||||
- id: 0x00ff
|
||||
name: SubfileType
|
||||
type_name: SHORT
|
||||
- id: 0x0100
|
||||
name: ImageWidth
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0x0101
|
||||
name: ImageLength
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0x0102
|
||||
name: BitsPerSample
|
||||
type_name: SHORT
|
||||
- id: 0x0103
|
||||
name: Compression
|
||||
type_name: SHORT
|
||||
- id: 0x0106
|
||||
name: PhotometricInterpretation
|
||||
type_name: SHORT
|
||||
- id: 0x0107
|
||||
name: Thresholding
|
||||
type_name: SHORT
|
||||
- id: 0x0108
|
||||
name: CellWidth
|
||||
type_name: SHORT
|
||||
- id: 0x0109
|
||||
name: CellLength
|
||||
type_name: SHORT
|
||||
- id: 0x010a
|
||||
name: FillOrder
|
||||
type_name: SHORT
|
||||
- id: 0x010d
|
||||
name: DocumentName
|
||||
type_name: ASCII
|
||||
- id: 0x010e
|
||||
name: ImageDescription
|
||||
type_name: ASCII
|
||||
- id: 0x010f
|
||||
name: Make
|
||||
type_name: ASCII
|
||||
- id: 0x0110
|
||||
name: Model
|
||||
type_name: ASCII
|
||||
- id: 0x0111
|
||||
name: StripOffsets
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0x0112
|
||||
name: Orientation
|
||||
type_name: SHORT
|
||||
- id: 0x0115
|
||||
name: SamplesPerPixel
|
||||
type_name: SHORT
|
||||
- id: 0x0116
|
||||
name: RowsPerStrip
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0x0117
|
||||
name: StripByteCounts
|
||||
type_names: [LONG, SHORT]
|
||||
- id: 0x011a
|
||||
name: XResolution
|
||||
type_name: RATIONAL
|
||||
- id: 0x011b
|
||||
name: YResolution
|
||||
type_name: RATIONAL
|
||||
- id: 0x011c
|
||||
name: PlanarConfiguration
|
||||
type_name: SHORT
|
||||
- id: 0x0122
|
||||
name: GrayResponseUnit
|
||||
type_name: SHORT
|
||||
- id: 0x0123
|
||||
name: GrayResponseCurve
|
||||
type_name: SHORT
|
||||
- id: 0x0124
|
||||
name: T4Options
|
||||
type_name: LONG
|
||||
- id: 0x0125
|
||||
name: T6Options
|
||||
type_name: LONG
|
||||
- id: 0x0128
|
||||
name: ResolutionUnit
|
||||
type_name: SHORT
|
||||
- id: 0x0129
|
||||
name: PageNumber
|
||||
type_name: SHORT
|
||||
- id: 0x012d
|
||||
name: TransferFunction
|
||||
type_name: SHORT
|
||||
- id: 0x0131
|
||||
name: Software
|
||||
type_name: ASCII
|
||||
- id: 0x0132
|
||||
name: DateTime
|
||||
type_name: ASCII
|
||||
- id: 0x013b
|
||||
name: Artist
|
||||
type_name: ASCII
|
||||
- id: 0x013c
|
||||
name: HostComputer
|
||||
type_name: ASCII
|
||||
- id: 0x013d
|
||||
name: Predictor
|
||||
type_name: SHORT
|
||||
- id: 0x013e
|
||||
name: WhitePoint
|
||||
type_name: RATIONAL
|
||||
- id: 0x013f
|
||||
name: PrimaryChromaticities
|
||||
type_name: RATIONAL
|
||||
- id: 0x0140
|
||||
name: ColorMap
|
||||
type_name: SHORT
|
||||
- id: 0x0141
|
||||
name: HalftoneHints
|
||||
type_name: SHORT
|
||||
- id: 0x0142
|
||||
name: TileWidth
|
||||
type_name: SHORT
|
||||
- id: 0x0143
|
||||
name: TileLength
|
||||
type_name: SHORT
|
||||
- id: 0x0144
|
||||
name: TileOffsets
|
||||
type_name: SHORT
|
||||
- id: 0x0145
|
||||
name: TileByteCounts
|
||||
type_name: SHORT
|
||||
- id: 0x014a
|
||||
name: SubIFDs
|
||||
type_name: LONG
|
||||
- id: 0x014c
|
||||
name: InkSet
|
||||
type_name: SHORT
|
||||
- id: 0x014d
|
||||
name: InkNames
|
||||
type_name: ASCII
|
||||
- id: 0x014e
|
||||
name: NumberOfInks
|
||||
type_name: SHORT
|
||||
- id: 0x0150
|
||||
name: DotRange
|
||||
type_name: BYTE
|
||||
- id: 0x0151
|
||||
name: TargetPrinter
|
||||
type_name: ASCII
|
||||
- id: 0x0152
|
||||
name: ExtraSamples
|
||||
type_name: SHORT
|
||||
- id: 0x0153
|
||||
name: SampleFormat
|
||||
type_name: SHORT
|
||||
- id: 0x0154
|
||||
name: SMinSampleValue
|
||||
type_name: SHORT
|
||||
- id: 0x0155
|
||||
name: SMaxSampleValue
|
||||
type_name: SHORT
|
||||
- id: 0x0156
|
||||
name: TransferRange
|
||||
type_name: SHORT
|
||||
- id: 0x0157
|
||||
name: ClipPath
|
||||
type_name: BYTE
|
||||
- id: 0x015a
|
||||
name: Indexed
|
||||
type_name: SHORT
|
||||
- id: 0x015b
|
||||
name: JPEGTables
|
||||
type_name: UNDEFINED
|
||||
- id: 0x015f
|
||||
name: OPIProxy
|
||||
type_name: SHORT
|
||||
- id: 0x0200
|
||||
name: JPEGProc
|
||||
type_name: LONG
|
||||
- id: 0x0201
|
||||
name: JPEGInterchangeFormat
|
||||
type_name: LONG
|
||||
- id: 0x0202
|
||||
name: JPEGInterchangeFormatLength
|
||||
type_name: LONG
|
||||
- id: 0x0203
|
||||
name: JPEGRestartInterval
|
||||
type_name: SHORT
|
||||
- id: 0x0205
|
||||
name: JPEGLosslessPredictors
|
||||
type_name: SHORT
|
||||
- id: 0x0206
|
||||
name: JPEGPointTransforms
|
||||
type_name: SHORT
|
||||
- id: 0x0207
|
||||
name: JPEGQTables
|
||||
type_name: LONG
|
||||
- id: 0x0208
|
||||
name: JPEGDCTables
|
||||
type_name: LONG
|
||||
- id: 0x0209
|
||||
name: JPEGACTables
|
||||
type_name: LONG
|
||||
- id: 0x0211
|
||||
name: YCbCrCoefficients
|
||||
type_name: RATIONAL
|
||||
- id: 0x0212
|
||||
name: YCbCrSubSampling
|
||||
type_name: SHORT
|
||||
- id: 0x0213
|
||||
name: YCbCrPositioning
|
||||
type_name: SHORT
|
||||
- id: 0x0214
|
||||
name: ReferenceBlackWhite
|
||||
type_name: RATIONAL
|
||||
- id: 0x02bc
|
||||
name: XMLPacket
|
||||
type_name: BYTE
|
||||
- id: 0x4746
|
||||
name: Rating
|
||||
type_name: SHORT
|
||||
- id: 0x4749
|
||||
name: RatingPercent
|
||||
type_name: SHORT
|
||||
- id: 0x800d
|
||||
name: ImageID
|
||||
type_name: ASCII
|
||||
- id: 0x828d
|
||||
name: CFARepeatPatternDim
|
||||
type_name: SHORT
|
||||
- id: 0x828e
|
||||
name: CFAPattern
|
||||
type_name: BYTE
|
||||
- id: 0x828f
|
||||
name: BatteryLevel
|
||||
type_name: RATIONAL
|
||||
- id: 0x8298
|
||||
name: Copyright
|
||||
type_name: ASCII
|
||||
- id: 0x829a
|
||||
name: ExposureTime
|
||||
# NOTE(dustin): SRATIONAL isn't mentioned in the standard, but we have seen it in real data.
|
||||
type_names: [RATIONAL, SRATIONAL]
|
||||
- id: 0x829d
|
||||
name: FNumber
|
||||
# NOTE(dustin): SRATIONAL isn't mentioned in the standard, but we have seen it in real data.
|
||||
type_names: [RATIONAL, SRATIONAL]
|
||||
- id: 0x83bb
|
||||
name: IPTCNAA
|
||||
type_name: LONG
|
||||
- id: 0x8649
|
||||
name: ImageResources
|
||||
type_name: BYTE
|
||||
- id: 0x8769
|
||||
name: ExifTag
|
||||
type_name: LONG
|
||||
- id: 0x8773
|
||||
name: InterColorProfile
|
||||
type_name: UNDEFINED
|
||||
- id: 0x8822
|
||||
name: ExposureProgram
|
||||
type_name: SHORT
|
||||
- id: 0x8824
|
||||
name: SpectralSensitivity
|
||||
type_name: ASCII
|
||||
- id: 0x8825
|
||||
name: GPSTag
|
||||
type_name: LONG
|
||||
- id: 0x8827
|
||||
name: ISOSpeedRatings
|
||||
type_name: SHORT
|
||||
- id: 0x8828
|
||||
name: OECF
|
||||
type_name: UNDEFINED
|
||||
- id: 0x8829
|
||||
name: Interlace
|
||||
type_name: SHORT
|
||||
- id: 0x882b
|
||||
name: SelfTimerMode
|
||||
type_name: SHORT
|
||||
- id: 0x9003
|
||||
name: DateTimeOriginal
|
||||
type_name: ASCII
|
||||
- id: 0x9102
|
||||
name: CompressedBitsPerPixel
|
||||
type_name: RATIONAL
|
||||
- id: 0x9201
|
||||
name: ShutterSpeedValue
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9202
|
||||
name: ApertureValue
|
||||
type_name: RATIONAL
|
||||
- id: 0x9203
|
||||
name: BrightnessValue
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9204
|
||||
name: ExposureBiasValue
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9205
|
||||
name: MaxApertureValue
|
||||
type_name: RATIONAL
|
||||
- id: 0x9206
|
||||
name: SubjectDistance
|
||||
type_name: SRATIONAL
|
||||
- id: 0x9207
|
||||
name: MeteringMode
|
||||
type_name: SHORT
|
||||
- id: 0x9208
|
||||
name: LightSource
|
||||
type_name: SHORT
|
||||
- id: 0x9209
|
||||
name: Flash
|
||||
type_name: SHORT
|
||||
- id: 0x920a
|
||||
name: FocalLength
|
||||
type_name: RATIONAL
|
||||
- id: 0x920b
|
||||
name: FlashEnergy
|
||||
type_name: RATIONAL
|
||||
- id: 0x920c
|
||||
name: SpatialFrequencyResponse
|
||||
type_name: UNDEFINED
|
||||
- id: 0x920d
|
||||
name: Noise
|
||||
type_name: UNDEFINED
|
||||
- id: 0x920e
|
||||
name: FocalPlaneXResolution
|
||||
type_name: RATIONAL
|
||||
- id: 0x920f
|
||||
name: FocalPlaneYResolution
|
||||
type_name: RATIONAL
|
||||
- id: 0x9210
|
||||
name: FocalPlaneResolutionUnit
|
||||
type_name: SHORT
|
||||
- id: 0x9211
|
||||
name: ImageNumber
|
||||
type_name: LONG
|
||||
- id: 0x9212
|
||||
name: SecurityClassification
|
||||
type_name: ASCII
|
||||
- id: 0x9213
|
||||
name: ImageHistory
|
||||
type_name: ASCII
|
||||
- id: 0x9214
|
||||
name: SubjectLocation
|
||||
type_name: SHORT
|
||||
- id: 0x9215
|
||||
name: ExposureIndex
|
||||
type_name: RATIONAL
|
||||
- id: 0x9216
|
||||
name: TIFFEPStandardID
|
||||
type_name: BYTE
|
||||
- id: 0x9217
|
||||
name: SensingMethod
|
||||
type_name: SHORT
|
||||
- id: 0x9c9b
|
||||
name: XPTitle
|
||||
type_name: BYTE
|
||||
- id: 0x9c9c
|
||||
name: XPComment
|
||||
type_name: BYTE
|
||||
- id: 0x9c9d
|
||||
name: XPAuthor
|
||||
type_name: BYTE
|
||||
- id: 0x9c9e
|
||||
name: XPKeywords
|
||||
type_name: BYTE
|
||||
- id: 0x9c9f
|
||||
name: XPSubject
|
||||
type_name: BYTE
|
||||
- id: 0xc4a5
|
||||
name: PrintImageMatching
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc612
|
||||
name: DNGVersion
|
||||
type_name: BYTE
|
||||
- id: 0xc613
|
||||
name: DNGBackwardVersion
|
||||
type_name: BYTE
|
||||
- id: 0xc614
|
||||
name: UniqueCameraModel
|
||||
type_name: ASCII
|
||||
- id: 0xc615
|
||||
name: LocalizedCameraModel
|
||||
type_name: BYTE
|
||||
- id: 0xc616
|
||||
name: CFAPlaneColor
|
||||
type_name: BYTE
|
||||
- id: 0xc617
|
||||
name: CFALayout
|
||||
type_name: SHORT
|
||||
- id: 0xc618
|
||||
name: LinearizationTable
|
||||
type_name: SHORT
|
||||
- id: 0xc619
|
||||
name: BlackLevelRepeatDim
|
||||
type_name: SHORT
|
||||
- id: 0xc61a
|
||||
name: BlackLevel
|
||||
type_name: RATIONAL
|
||||
- id: 0xc61b
|
||||
name: BlackLevelDeltaH
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc61c
|
||||
name: BlackLevelDeltaV
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc61d
|
||||
name: WhiteLevel
|
||||
type_name: SHORT
|
||||
- id: 0xc61e
|
||||
name: DefaultScale
|
||||
type_name: RATIONAL
|
||||
- id: 0xc61f
|
||||
name: DefaultCropOrigin
|
||||
type_name: SHORT
|
||||
- id: 0xc620
|
||||
name: DefaultCropSize
|
||||
type_name: SHORT
|
||||
- id: 0xc621
|
||||
name: ColorMatrix1
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc622
|
||||
name: ColorMatrix2
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc623
|
||||
name: CameraCalibration1
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc624
|
||||
name: CameraCalibration2
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc625
|
||||
name: ReductionMatrix1
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc626
|
||||
name: ReductionMatrix2
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc627
|
||||
name: AnalogBalance
|
||||
type_name: RATIONAL
|
||||
- id: 0xc628
|
||||
name: AsShotNeutral
|
||||
type_name: SHORT
|
||||
- id: 0xc629
|
||||
name: AsShotWhiteXY
|
||||
type_name: RATIONAL
|
||||
- id: 0xc62a
|
||||
name: BaselineExposure
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc62b
|
||||
name: BaselineNoise
|
||||
type_name: RATIONAL
|
||||
- id: 0xc62c
|
||||
name: BaselineSharpness
|
||||
type_name: RATIONAL
|
||||
- id: 0xc62d
|
||||
name: BayerGreenSplit
|
||||
type_name: LONG
|
||||
- id: 0xc62e
|
||||
name: LinearResponseLimit
|
||||
type_name: RATIONAL
|
||||
- id: 0xc62f
|
||||
name: CameraSerialNumber
|
||||
type_name: ASCII
|
||||
- id: 0xc630
|
||||
name: LensInfo
|
||||
type_name: RATIONAL
|
||||
- id: 0xc631
|
||||
name: ChromaBlurRadius
|
||||
type_name: RATIONAL
|
||||
- id: 0xc632
|
||||
name: AntiAliasStrength
|
||||
type_name: RATIONAL
|
||||
- id: 0xc633
|
||||
name: ShadowScale
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc634
|
||||
name: DNGPrivateData
|
||||
type_name: BYTE
|
||||
- id: 0xc635
|
||||
name: MakerNoteSafety
|
||||
type_name: SHORT
|
||||
- id: 0xc65a
|
||||
name: CalibrationIlluminant1
|
||||
type_name: SHORT
|
||||
- id: 0xc65b
|
||||
name: CalibrationIlluminant2
|
||||
type_name: SHORT
|
||||
- id: 0xc65c
|
||||
name: BestQualityScale
|
||||
type_name: RATIONAL
|
||||
- id: 0xc65d
|
||||
name: RawDataUniqueID
|
||||
type_name: BYTE
|
||||
- id: 0xc68b
|
||||
name: OriginalRawFileName
|
||||
type_name: BYTE
|
||||
- id: 0xc68c
|
||||
name: OriginalRawFileData
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc68d
|
||||
name: ActiveArea
|
||||
type_name: SHORT
|
||||
- id: 0xc68e
|
||||
name: MaskedAreas
|
||||
type_name: SHORT
|
||||
- id: 0xc68f
|
||||
name: AsShotICCProfile
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc690
|
||||
name: AsShotPreProfileMatrix
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc691
|
||||
name: CurrentICCProfile
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc692
|
||||
name: CurrentPreProfileMatrix
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc6bf
|
||||
name: ColorimetricReference
|
||||
type_name: SHORT
|
||||
- id: 0xc6f3
|
||||
name: CameraCalibrationSignature
|
||||
type_name: BYTE
|
||||
- id: 0xc6f4
|
||||
name: ProfileCalibrationSignature
|
||||
type_name: BYTE
|
||||
- id: 0xc6f6
|
||||
name: AsShotProfileName
|
||||
type_name: BYTE
|
||||
- id: 0xc6f7
|
||||
name: NoiseReductionApplied
|
||||
type_name: RATIONAL
|
||||
- id: 0xc6f8
|
||||
name: ProfileName
|
||||
type_name: BYTE
|
||||
- id: 0xc6f9
|
||||
name: ProfileHueSatMapDims
|
||||
type_name: LONG
|
||||
- id: 0xc6fd
|
||||
name: ProfileEmbedPolicy
|
||||
type_name: LONG
|
||||
- id: 0xc6fe
|
||||
name: ProfileCopyright
|
||||
type_name: BYTE
|
||||
- id: 0xc714
|
||||
name: ForwardMatrix1
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc715
|
||||
name: ForwardMatrix2
|
||||
type_name: SRATIONAL
|
||||
- id: 0xc716
|
||||
name: PreviewApplicationName
|
||||
type_name: BYTE
|
||||
- id: 0xc717
|
||||
name: PreviewApplicationVersion
|
||||
type_name: BYTE
|
||||
- id: 0xc718
|
||||
name: PreviewSettingsName
|
||||
type_name: BYTE
|
||||
- id: 0xc719
|
||||
name: PreviewSettingsDigest
|
||||
type_name: BYTE
|
||||
- id: 0xc71a
|
||||
name: PreviewColorSpace
|
||||
type_name: LONG
|
||||
- id: 0xc71b
|
||||
name: PreviewDateTime
|
||||
type_name: ASCII
|
||||
- id: 0xc71c
|
||||
name: RawImageDigest
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc71d
|
||||
name: OriginalRawFileDigest
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc71e
|
||||
name: SubTileBlockSize
|
||||
type_name: LONG
|
||||
- id: 0xc71f
|
||||
name: RowInterleaveFactor
|
||||
type_name: LONG
|
||||
- id: 0xc725
|
||||
name: ProfileLookTableDims
|
||||
type_name: LONG
|
||||
- id: 0xc740
|
||||
name: OpcodeList1
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc741
|
||||
name: OpcodeList2
|
||||
type_name: UNDEFINED
|
||||
- id: 0xc74e
|
||||
name: OpcodeList3
|
||||
type_name: UNDEFINED
|
||||
IFD/Exif/Iop:
|
||||
- id: 0x0001
|
||||
name: InteroperabilityIndex
|
||||
type_name: ASCII
|
||||
- id: 0x0002
|
||||
name: InteroperabilityVersion
|
||||
type_name: UNDEFINED
|
||||
- id: 0x1000
|
||||
name: RelatedImageFileFormat
|
||||
type_name: ASCII
|
||||
- id: 0x1001
|
||||
name: RelatedImageWidth
|
||||
type_name: LONG
|
||||
- id: 0x1002
|
||||
name: RelatedImageLength
|
||||
type_name: LONG
|
||||
`
|
||||
)
|
|
@ -0,0 +1,182 @@
|
|||
package exif
|
||||
|
||||
import (
|
||||
"path"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"io/ioutil"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
var (
|
||||
testExifData []byte
|
||||
)
|
||||
|
||||
func getExifSimpleTestIb() *IfdBuilder {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err := log.Wrap(state.(error))
|
||||
log.Panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
im := NewIfdMapping()
|
||||
|
||||
err := LoadStandardIfds(im)
|
||||
log.PanicIf(err)
|
||||
|
||||
ti := NewTagIndex()
|
||||
ib := NewIfdBuilder(im, ti, exifcommon.IfdStandardIfdIdentity, exifcommon.TestDefaultByteOrder)
|
||||
|
||||
err = ib.AddStandard(0x000b, "asciivalue")
|
||||
log.PanicIf(err)
|
||||
|
||||
err = ib.AddStandard(0x00ff, []uint16{0x1122})
|
||||
log.PanicIf(err)
|
||||
|
||||
err = ib.AddStandard(0x0100, []uint32{0x33445566})
|
||||
log.PanicIf(err)
|
||||
|
||||
err = ib.AddStandard(0x013e, []exifcommon.Rational{{Numerator: 0x11112222, Denominator: 0x33334444}})
|
||||
log.PanicIf(err)
|
||||
|
||||
return ib
|
||||
}
|
||||
|
||||
func getExifSimpleTestIbBytes() []byte {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err := log.Wrap(state.(error))
|
||||
log.Panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
im := NewIfdMapping()
|
||||
|
||||
err := LoadStandardIfds(im)
|
||||
log.PanicIf(err)
|
||||
|
||||
ti := NewTagIndex()
|
||||
ib := NewIfdBuilder(im, ti, exifcommon.IfdStandardIfdIdentity, exifcommon.TestDefaultByteOrder)
|
||||
|
||||
err = ib.AddStandard(0x000b, "asciivalue")
|
||||
log.PanicIf(err)
|
||||
|
||||
err = ib.AddStandard(0x00ff, []uint16{0x1122})
|
||||
log.PanicIf(err)
|
||||
|
||||
err = ib.AddStandard(0x0100, []uint32{0x33445566})
|
||||
log.PanicIf(err)
|
||||
|
||||
err = ib.AddStandard(0x013e, []exifcommon.Rational{{Numerator: 0x11112222, Denominator: 0x33334444}})
|
||||
log.PanicIf(err)
|
||||
|
||||
ibe := NewIfdByteEncoder()
|
||||
|
||||
exifData, err := ibe.EncodeToExif(ib)
|
||||
log.PanicIf(err)
|
||||
|
||||
return exifData
|
||||
}
|
||||
|
||||
func validateExifSimpleTestIb(exifData []byte, t *testing.T) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err := log.Wrap(state.(error))
|
||||
log.Panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
im := NewIfdMapping()
|
||||
|
||||
err := LoadStandardIfds(im)
|
||||
log.PanicIf(err)
|
||||
|
||||
ti := NewTagIndex()
|
||||
|
||||
eh, index, err := Collect(im, ti, exifData)
|
||||
log.PanicIf(err)
|
||||
|
||||
if eh.ByteOrder != exifcommon.TestDefaultByteOrder {
|
||||
t.Fatalf("EXIF byte-order is not correct: %v", eh.ByteOrder)
|
||||
} else if eh.FirstIfdOffset != ExifDefaultFirstIfdOffset {
|
||||
t.Fatalf("EXIF first IFD-offset not correct: (0x%02x)", eh.FirstIfdOffset)
|
||||
}
|
||||
|
||||
if len(index.Ifds) != 1 {
|
||||
t.Fatalf("There wasn't exactly one IFD decoded: (%d)", len(index.Ifds))
|
||||
}
|
||||
|
||||
ifd := index.RootIfd
|
||||
|
||||
if ifd.ByteOrder != exifcommon.TestDefaultByteOrder {
|
||||
t.Fatalf("IFD byte-order not correct.")
|
||||
} else if ifd.ifdIdentity.UnindexedString() != exifcommon.IfdStandardIfdIdentity.UnindexedString() {
|
||||
t.Fatalf("IFD name not correct.")
|
||||
} else if ifd.ifdIdentity.Index() != 0 {
|
||||
t.Fatalf("IFD index not zero: (%d)", ifd.ifdIdentity.Index())
|
||||
} else if ifd.Offset != uint32(0x0008) {
|
||||
t.Fatalf("IFD offset not correct.")
|
||||
} else if len(ifd.Entries) != 4 {
|
||||
t.Fatalf("IFD number of entries not correct: (%d)", len(ifd.Entries))
|
||||
} else if ifd.NextIfdOffset != uint32(0) {
|
||||
t.Fatalf("Next-IFD offset is non-zero.")
|
||||
} else if ifd.NextIfd != nil {
|
||||
t.Fatalf("Next-IFD pointer is non-nil.")
|
||||
}
|
||||
|
||||
// Verify the values by using the actual, original types (this is awesome).
|
||||
|
||||
expected := []struct {
|
||||
tagId uint16
|
||||
value interface{}
|
||||
}{
|
||||
{tagId: 0x000b, value: "asciivalue"},
|
||||
{tagId: 0x00ff, value: []uint16{0x1122}},
|
||||
{tagId: 0x0100, value: []uint32{0x33445566}},
|
||||
{tagId: 0x013e, value: []exifcommon.Rational{{Numerator: 0x11112222, Denominator: 0x33334444}}},
|
||||
}
|
||||
|
||||
for i, ite := range ifd.Entries {
|
||||
if ite.TagId() != expected[i].tagId {
|
||||
t.Fatalf("Tag-ID for entry (%d) not correct: (0x%02x) != (0x%02x)", i, ite.TagId(), expected[i].tagId)
|
||||
}
|
||||
|
||||
value, err := ite.Value()
|
||||
log.PanicIf(err)
|
||||
|
||||
if reflect.DeepEqual(value, expected[i].value) != true {
|
||||
t.Fatalf("Value for entry (%d) not correct: [%v] != [%v]", i, value, expected[i].value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func getTestImageFilepath() string {
|
||||
assetsPath := exifcommon.GetTestAssetsPath()
|
||||
testImageFilepath := path.Join(assetsPath, "NDM_8901.jpg")
|
||||
return testImageFilepath
|
||||
}
|
||||
|
||||
func getTestExifData() []byte {
|
||||
if testExifData == nil {
|
||||
assetsPath := exifcommon.GetTestAssetsPath()
|
||||
filepath := path.Join(assetsPath, "NDM_8901.jpg.exif")
|
||||
|
||||
var err error
|
||||
|
||||
testExifData, err = ioutil.ReadFile(filepath)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
return testExifData
|
||||
}
|
||||
|
||||
func getTestGpsImageFilepath() string {
|
||||
assetsPath := exifcommon.GetTestAssetsPath()
|
||||
testGpsImageFilepath := path.Join(assetsPath, "gps.jpg")
|
||||
return testGpsImageFilepath
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
|
||||
## 0xa40b

The specification is not specific or clear enough for this tag to be handled. Without a working example, we're deferring until some point in the future when either we or someone else has a better understanding.
|
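For context only, here is a minimal sketch of what a handler for 0xa40b could look like if a working sample ever clarified the layout, following the same shape as the other files under `vendor/github.com/dsoprea/go-exif/v2/undefined/` in this change. Everything in it is hypothetical: the `TagA40BDeviceSettingDescription` type, its `RawBytes` placeholder field, and the choice to read the payload as raw bytes are assumptions, since the real DeviceSettingDescription structure is exactly what remains unknown.

```go
package exifundefined

import (
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

// TagA40BDeviceSettingDescription is a hypothetical placeholder; the actual
// layout of tag 0xa40b is undocumented, so it only carries the raw payload.
type TagA40BDeviceSettingDescription struct {
	RawBytes []byte
}

func (TagA40BDeviceSettingDescription) EncoderName() string {
	return "CodecA40BDeviceSettingDescription"
}

func (dsd TagA40BDeviceSettingDescription) String() string {
	return fmt.Sprintf("DeviceSettingDescription<LEN=(%d)>", len(dsd.RawBytes))
}

// CodecA40BDeviceSettingDescription is a hypothetical decoder that just
// captures the undifferentiated bytes of the tag.
type CodecA40BDeviceSettingDescription struct {
}

func (CodecA40BDeviceSettingDescription) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
	// Read the payload as raw bytes, since no finer structure is known.
	valueContext.SetUndefinedValueType(exifcommon.TypeByte)

	valueBytes, err := valueContext.ReadBytes()
	if err != nil {
		return nil, err
	}

	return TagA40BDeviceSettingDescription{RawBytes: valueBytes}, nil
}

func init() {
	// Registration mirrors the other handlers in this directory.
	registerDecoder(
		exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
		0xa40b,
		CodecA40BDeviceSettingDescription{})
}
```

No such file ships in this change; 0xa40b remains unhandled, and the sketch only illustrates the registration pattern a future handler would follow.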
|
@ -0,0 +1,62 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
// Encode encodes the given encodeable undefined value to bytes.
|
||||
func Encode(value EncodeableValue, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
encoderName := value.EncoderName()
|
||||
|
||||
encoder, found := encoders[encoderName]
|
||||
if found == false {
|
||||
log.Panicf("no encoder registered for type [%s]", encoderName)
|
||||
}
|
||||
|
||||
encoded, unitCount, err = encoder.Encode(value, byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
return encoded, unitCount, nil
|
||||
}
|
||||
|
||||
// Decode constructs a value from raw encoded bytes.
|
||||
func Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
uth := UndefinedTagHandle{
|
||||
IfdPath: valueContext.IfdPath(),
|
||||
TagId: valueContext.TagId(),
|
||||
}
|
||||
|
||||
decoder, found := decoders[uth]
|
||||
if found == false {
|
||||
// We have no choice but to return the error. We have no way of knowing how
|
||||
// much data there is without already knowing what data-type this tag is.
|
||||
return nil, exifcommon.ErrUnhandledUndefinedTypedTag
|
||||
}
|
||||
|
||||
value, err = decoder.Decode(valueContext)
|
||||
if err != nil {
|
||||
if err == ErrUnparseableValue {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Panic(err)
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
|
@ -0,0 +1,148 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
type Tag8828Oecf struct {
|
||||
Columns uint16
|
||||
Rows uint16
|
||||
ColumnNames []string
|
||||
Values []exifcommon.SignedRational
|
||||
}
|
||||
|
||||
func (oecf Tag8828Oecf) String() string {
|
||||
return fmt.Sprintf("Tag8828Oecf<COLUMNS=(%d) ROWS=(%d)>", oecf.Columns, oecf.Rows)
|
||||
}
|
||||
|
||||
func (oecf Tag8828Oecf) EncoderName() string {
|
||||
return "Codec8828Oecf"
|
||||
}
|
||||
|
||||
type Codec8828Oecf struct {
|
||||
}
|
||||
|
||||
func (Codec8828Oecf) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test
|
||||
|
||||
oecf, ok := value.(Tag8828Oecf)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a Tag8828Oecf")
|
||||
}
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
err = binary.Write(b, byteOrder, oecf.Columns)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = binary.Write(b, byteOrder, oecf.Rows)
|
||||
log.PanicIf(err)
|
||||
|
||||
for _, name := range oecf.ColumnNames {
|
||||
_, err := b.Write([]byte(name))
|
||||
log.PanicIf(err)
|
||||
|
||||
_, err = b.Write([]byte{0})
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
ve := exifcommon.NewValueEncoder(byteOrder)
|
||||
|
||||
ed, err := ve.Encode(oecf.Values)
|
||||
log.PanicIf(err)
|
||||
|
||||
_, err = b.Write(ed.Encoded)
|
||||
log.PanicIf(err)
|
||||
|
||||
return b.Bytes(), uint32(b.Len()), nil
|
||||
}
|
||||
|
||||
func (Codec8828Oecf) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test using known good data.
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeByte)
|
||||
|
||||
valueBytes, err := valueContext.ReadBytes()
|
||||
log.PanicIf(err)
|
||||
|
||||
oecf := Tag8828Oecf{}
|
||||
|
||||
oecf.Columns = valueContext.ByteOrder().Uint16(valueBytes[0:2])
|
||||
oecf.Rows = valueContext.ByteOrder().Uint16(valueBytes[2:4])
|
||||
|
||||
columnNames := make([]string, oecf.Columns)
|
||||
|
||||
// startAt is where the current column name starts.
|
||||
startAt := 4
|
||||
|
||||
// offset is our current position.
|
||||
offset := startAt
|
||||
|
||||
currentColumnNumber := uint16(0)
|
||||
|
||||
for currentColumnNumber < oecf.Columns {
|
||||
if valueBytes[offset] == 0 {
|
||||
columnName := string(valueBytes[startAt:offset])
|
||||
if len(columnName) == 0 {
|
||||
log.Panicf("SFR column (%d) has zero length", currentColumnNumber)
|
||||
}
|
||||
|
||||
columnNames[currentColumnNumber] = columnName
|
||||
currentColumnNumber++
|
||||
|
||||
offset++
|
||||
startAt = offset
|
||||
continue
|
||||
}
|
||||
|
||||
offset++
|
||||
}
|
||||
|
||||
oecf.ColumnNames = columnNames
|
||||
|
||||
rawRationalBytes := valueBytes[offset:]
|
||||
|
||||
rationalSize := exifcommon.TypeSignedRational.Size()
|
||||
if len(rawRationalBytes)%rationalSize > 0 {
|
||||
log.Panicf("OECF signed-rationals not aligned: (%d) %% (%d) > 0", len(rawRationalBytes), rationalSize)
|
||||
}
|
||||
|
||||
rationalCount := len(rawRationalBytes) / rationalSize
|
||||
|
||||
parser := new(exifcommon.Parser)
|
||||
|
||||
byteOrder := valueContext.ByteOrder()
|
||||
|
||||
items, err := parser.ParseSignedRationals(rawRationalBytes, uint32(rationalCount), byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
oecf.Values = items
|
||||
|
||||
return oecf, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0x8828,
|
||||
Codec8828Oecf{})
|
||||
}
|
69 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_9000_exif_version.go generated vendored 100644
|
@ -0,0 +1,69 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
type Tag9000ExifVersion struct {
|
||||
ExifVersion string
|
||||
}
|
||||
|
||||
func (Tag9000ExifVersion) EncoderName() string {
|
||||
return "Codec9000ExifVersion"
|
||||
}
|
||||
|
||||
func (ev Tag9000ExifVersion) String() string {
|
||||
return ev.ExifVersion
|
||||
}
|
||||
|
||||
type Codec9000ExifVersion struct {
|
||||
}
|
||||
|
||||
func (Codec9000ExifVersion) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
s, ok := value.(Tag9000ExifVersion)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a Tag9000ExifVersion")
|
||||
}
|
||||
|
||||
return []byte(s.ExifVersion), uint32(len(s.ExifVersion)), nil
|
||||
}
|
||||
|
||||
func (Codec9000ExifVersion) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeAsciiNoNul)
|
||||
|
||||
valueString, err := valueContext.ReadAsciiNoNul()
|
||||
log.PanicIf(err)
|
||||
|
||||
ev := Tag9000ExifVersion{
|
||||
ExifVersion: valueString,
|
||||
}
|
||||
|
||||
return ev, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
Tag9000ExifVersion{},
|
||||
Codec9000ExifVersion{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0x9000,
|
||||
Codec9000ExifVersion{})
|
||||
}
|
123 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_9101_components_configuration.go generated vendored 100644
|
@ -0,0 +1,123 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
const (
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_Y = 0x1
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_Cb = 0x2
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_Cr = 0x3
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_R = 0x4
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_G = 0x5
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_B = 0x6
|
||||
)
|
||||
|
||||
const (
|
||||
TagUndefinedType_9101_ComponentsConfiguration_OTHER = iota
|
||||
TagUndefinedType_9101_ComponentsConfiguration_RGB = iota
|
||||
TagUndefinedType_9101_ComponentsConfiguration_YCBCR = iota
|
||||
)
|
||||
|
||||
var (
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Names = map[int]string{
|
||||
TagUndefinedType_9101_ComponentsConfiguration_OTHER: "OTHER",
|
||||
TagUndefinedType_9101_ComponentsConfiguration_RGB: "RGB",
|
||||
TagUndefinedType_9101_ComponentsConfiguration_YCBCR: "YCBCR",
|
||||
}
|
||||
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Configurations = map[int][]byte{
|
||||
TagUndefinedType_9101_ComponentsConfiguration_RGB: {
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_R,
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_G,
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_B,
|
||||
0,
|
||||
},
|
||||
|
||||
TagUndefinedType_9101_ComponentsConfiguration_YCBCR: {
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_Y,
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_Cb,
|
||||
TagUndefinedType_9101_ComponentsConfiguration_Channel_Cr,
|
||||
0,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
type TagExif9101ComponentsConfiguration struct {
|
||||
ConfigurationId int
|
||||
ConfigurationBytes []byte
|
||||
}
|
||||
|
||||
func (TagExif9101ComponentsConfiguration) EncoderName() string {
|
||||
return "CodecExif9101ComponentsConfiguration"
|
||||
}
|
||||
|
||||
func (cc TagExif9101ComponentsConfiguration) String() string {
|
||||
return fmt.Sprintf("Exif9101ComponentsConfiguration<ID=[%s] BYTES=%v>", TagUndefinedType_9101_ComponentsConfiguration_Names[cc.ConfigurationId], cc.ConfigurationBytes)
|
||||
}
|
||||
|
||||
type CodecExif9101ComponentsConfiguration struct {
|
||||
}
|
||||
|
||||
func (CodecExif9101ComponentsConfiguration) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
cc, ok := value.(TagExif9101ComponentsConfiguration)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a TagExif9101ComponentsConfiguration")
|
||||
}
|
||||
|
||||
return cc.ConfigurationBytes, uint32(len(cc.ConfigurationBytes)), nil
|
||||
}
|
||||
|
||||
func (CodecExif9101ComponentsConfiguration) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeByte)
|
||||
|
||||
valueBytes, err := valueContext.ReadBytes()
|
||||
log.PanicIf(err)
|
||||
|
||||
for configurationId, configurationBytes := range TagUndefinedType_9101_ComponentsConfiguration_Configurations {
|
||||
if bytes.Equal(configurationBytes, valueBytes) == true {
|
||||
cc := TagExif9101ComponentsConfiguration{
|
||||
ConfigurationId: configurationId,
|
||||
ConfigurationBytes: valueBytes,
|
||||
}
|
||||
|
||||
return cc, nil
|
||||
}
|
||||
}
|
||||
|
||||
cc := TagExif9101ComponentsConfiguration{
|
||||
ConfigurationId: TagUndefinedType_9101_ComponentsConfiguration_OTHER,
|
||||
ConfigurationBytes: valueBytes,
|
||||
}
|
||||
|
||||
return cc, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
TagExif9101ComponentsConfiguration{},
|
||||
CodecExif9101ComponentsConfiguration{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0x9101,
|
||||
CodecExif9101ComponentsConfiguration{})
|
||||
}
|
113 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_927C_maker_note.go generated vendored 100644
|
@ -0,0 +1,113 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"crypto/sha1"
|
||||
"encoding/binary"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
type Tag927CMakerNote struct {
|
||||
MakerNoteType []byte
|
||||
MakerNoteBytes []byte
|
||||
}
|
||||
|
||||
func (Tag927CMakerNote) EncoderName() string {
|
||||
return "Codec927CMakerNote"
|
||||
}
|
||||
|
||||
func (mn Tag927CMakerNote) String() string {
|
||||
parts := make([]string, len(mn.MakerNoteType))
|
||||
|
||||
for i, c := range mn.MakerNoteType {
|
||||
parts[i] = fmt.Sprintf("%02x", c)
|
||||
}
|
||||
|
||||
h := sha1.New()
|
||||
|
||||
_, err := h.Write(mn.MakerNoteBytes)
|
||||
log.PanicIf(err)
|
||||
|
||||
digest := h.Sum(nil)
|
||||
|
||||
return fmt.Sprintf("MakerNote<TYPE-ID=[%s] LEN=(%d) SHA1=[%020x]>", strings.Join(parts, " "), len(mn.MakerNoteBytes), digest)
|
||||
}
|
||||
|
||||
type Codec927CMakerNote struct {
|
||||
}
|
||||
|
||||
func (Codec927CMakerNote) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
mn, ok := value.(Tag927CMakerNote)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a Tag927CMakerNote")
|
||||
}
|
||||
|
||||
// TODO(dustin): Confirm this size against the specification.
|
||||
|
||||
return mn.MakerNoteBytes, uint32(len(mn.MakerNoteBytes)), nil
|
||||
}
|
||||
|
||||
func (Codec927CMakerNote) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// MakerNote
|
||||
// TODO(dustin): !! This is the Wild Wild West. This very well might be a child IFD, but any and all OEMs define their own formats. If we're going to be writing changes and this is complete EXIF (which may not have the first eight bytes), it might be fine. However, if these are just IFDs, they'll be relative to the main EXIF, and this will invalidate the MakerNote data for IFDs and any other implementations that use offsets unless we can interpret them all. It would be best to return to this later and just exclude this from being written for now, though that means losing a wealth of image metadata.
|
||||
// -> We can also just blindly try to interpret it as an IFD and validate that it looks good (maybe it will even have a 'next ifd' pointer that we can validate is 0x0).
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeByte)
|
||||
|
||||
valueBytes, err := valueContext.ReadBytes()
|
||||
log.PanicIf(err)
|
||||
|
||||
// TODO(dustin): Doesn't work, but here as an example.
|
||||
// ie := NewIfdEnumerate(valueBytes, byteOrder)
|
||||
|
||||
// // TODO(dustin): !! Validate types (might have proprietary types, but it might be worth splitting the list between valid and not valid; maybe fail if a certain proportion are invalid, or maybe aren't less than a certain small integer)?
|
||||
// ii, err := ie.Collect(0x0)
|
||||
|
||||
// for _, entry := range ii.RootIfd.Entries {
|
||||
// fmt.Printf("ENTRY: 0x%02x %d\n", entry.TagId, entry.TagType)
|
||||
// }
|
||||
|
||||
var makerNoteType []byte
|
||||
if len(valueBytes) >= 20 {
|
||||
makerNoteType = valueBytes[:20]
|
||||
} else {
|
||||
makerNoteType = valueBytes
|
||||
}
|
||||
|
||||
mn := Tag927CMakerNote{
|
||||
MakerNoteType: makerNoteType,
|
||||
|
||||
// MakerNoteBytes has the whole length of bytes. There's always
|
||||
// the chance that the first 20 bytes includes actual data.
|
||||
MakerNoteBytes: valueBytes,
|
||||
}
|
||||
|
||||
return mn, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
Tag927CMakerNote{},
|
||||
Codec927CMakerNote{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0x927c,
|
||||
Codec927CMakerNote{})
|
||||
}
|
142 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_9286_user_comment.go generated vendored 100644
|
@ -0,0 +1,142 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
var (
|
||||
exif9286Logger = log.NewLogger("exifundefined.exif_9286_user_comment")
|
||||
)
|
||||
|
||||
const (
|
||||
TagUndefinedType_9286_UserComment_Encoding_ASCII = iota
|
||||
TagUndefinedType_9286_UserComment_Encoding_JIS = iota
|
||||
TagUndefinedType_9286_UserComment_Encoding_UNICODE = iota
|
||||
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED = iota
|
||||
)
|
||||
|
||||
var (
|
||||
TagUndefinedType_9286_UserComment_Encoding_Names = map[int]string{
|
||||
TagUndefinedType_9286_UserComment_Encoding_ASCII: "ASCII",
|
||||
TagUndefinedType_9286_UserComment_Encoding_JIS: "JIS",
|
||||
TagUndefinedType_9286_UserComment_Encoding_UNICODE: "UNICODE",
|
||||
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED: "UNDEFINED",
|
||||
}
|
||||
|
||||
TagUndefinedType_9286_UserComment_Encodings = map[int][]byte{
|
||||
TagUndefinedType_9286_UserComment_Encoding_ASCII: {'A', 'S', 'C', 'I', 'I', 0, 0, 0},
|
||||
TagUndefinedType_9286_UserComment_Encoding_JIS: {'J', 'I', 'S', 0, 0, 0, 0, 0},
|
||||
TagUndefinedType_9286_UserComment_Encoding_UNICODE: {'U', 'n', 'i', 'c', 'o', 'd', 'e', 0},
|
||||
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED: {0, 0, 0, 0, 0, 0, 0, 0},
|
||||
}
|
||||
)
|
||||
|
||||
type Tag9286UserComment struct {
|
||||
EncodingType int
|
||||
EncodingBytes []byte
|
||||
}
|
||||
|
||||
func (Tag9286UserComment) EncoderName() string {
|
||||
return "Codec9286UserComment"
|
||||
}
|
||||
|
||||
func (uc Tag9286UserComment) String() string {
|
||||
var valuePhrase string
|
||||
|
||||
if uc.EncodingType == TagUndefinedType_9286_UserComment_Encoding_ASCII {
|
||||
return fmt.Sprintf("[ASCII] %s", string(uc.EncodingBytes))
|
||||
} else {
|
||||
if len(uc.EncodingBytes) <= 8 {
|
||||
valuePhrase = fmt.Sprintf("%v", uc.EncodingBytes)
|
||||
} else {
|
||||
valuePhrase = fmt.Sprintf("%v...", uc.EncodingBytes[:8])
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Sprintf("UserComment<SIZE=(%d) ENCODING=[%s] V=%v LEN=(%d)>", len(uc.EncodingBytes), TagUndefinedType_9286_UserComment_Encoding_Names[uc.EncodingType], valuePhrase, len(uc.EncodingBytes))
|
||||
}
|
||||
|
||||
type Codec9286UserComment struct {
|
||||
}
|
||||
|
||||
func (Codec9286UserComment) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
uc, ok := value.(Tag9286UserComment)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a Tag9286UserComment")
|
||||
}
|
||||
|
||||
encodingTypeBytes, found := TagUndefinedType_9286_UserComment_Encodings[uc.EncodingType]
|
||||
if found == false {
|
||||
log.Panicf("encoding-type not valid for unknown-type tag 9286 (UserComment): (%d)", uc.EncodingType)
|
||||
}
|
||||
|
||||
encoded = make([]byte, len(uc.EncodingBytes)+8)
|
||||
|
||||
copy(encoded[:8], encodingTypeBytes)
|
||||
copy(encoded[8:], uc.EncodingBytes)
|
||||
|
||||
// TODO(dustin): Confirm this size against the specification.
|
||||
|
||||
return encoded, uint32(len(encoded)), nil
|
||||
}
|
||||
|
||||
func (Codec9286UserComment) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeByte)
|
||||
|
||||
valueBytes, err := valueContext.ReadBytes()
|
||||
log.PanicIf(err)
|
||||
|
||||
if len(valueBytes) < 8 {
|
||||
return nil, ErrUnparseableValue
|
||||
}
|
||||
|
||||
unknownUc := Tag9286UserComment{
|
||||
EncodingType: TagUndefinedType_9286_UserComment_Encoding_UNDEFINED,
|
||||
EncodingBytes: []byte{},
|
||||
}
|
||||
|
||||
encoding := valueBytes[:8]
|
||||
for encodingIndex, encodingBytes := range TagUndefinedType_9286_UserComment_Encodings {
|
||||
if bytes.Compare(encoding, encodingBytes) == 0 {
|
||||
uc := Tag9286UserComment{
|
||||
EncodingType: encodingIndex,
|
||||
EncodingBytes: valueBytes[8:],
|
||||
}
|
||||
|
||||
return uc, nil
|
||||
}
|
||||
}
|
||||
|
||||
exif9286Logger.Warningf(nil, "User-comment encoding not valid. Returning 'unknown' type (the default).")
|
||||
return unknownUc, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
Tag9286UserComment{},
|
||||
Codec9286UserComment{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0x9286,
|
||||
Codec9286UserComment{})
|
||||
}
|
68 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_A000_flashpix_version.go generated vendored 100644
|
@ -0,0 +1,68 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
type TagA000FlashpixVersion struct {
|
||||
FlashpixVersion string
|
||||
}
|
||||
|
||||
func (TagA000FlashpixVersion) EncoderName() string {
|
||||
return "CodecA000FlashpixVersion"
|
||||
}
|
||||
|
||||
func (fv TagA000FlashpixVersion) String() string {
|
||||
return fv.FlashpixVersion
|
||||
}
|
||||
|
||||
type CodecA000FlashpixVersion struct {
|
||||
}
|
||||
|
||||
func (CodecA000FlashpixVersion) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
s, ok := value.(TagA000FlashpixVersion)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a TagA000FlashpixVersion")
|
||||
}
|
||||
|
||||
return []byte(s.FlashpixVersion), uint32(len(s.FlashpixVersion)), nil
|
||||
}
|
||||
|
||||
func (CodecA000FlashpixVersion) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeAsciiNoNul)
|
||||
|
||||
valueString, err := valueContext.ReadAsciiNoNul()
|
||||
log.PanicIf(err)
|
||||
|
||||
fv := TagA000FlashpixVersion{
|
||||
FlashpixVersion: valueString,
|
||||
}
|
||||
|
||||
return fv, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
TagA000FlashpixVersion{},
|
||||
CodecA000FlashpixVersion{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0xa000,
|
||||
CodecA000FlashpixVersion{})
|
||||
}
|
160 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_A20C_spatial_frequency_response.go generated vendored 100644
|
@ -0,0 +1,160 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
type TagA20CSpatialFrequencyResponse struct {
|
||||
Columns uint16
|
||||
Rows uint16
|
||||
ColumnNames []string
|
||||
Values []exifcommon.Rational
|
||||
}
|
||||
|
||||
func (TagA20CSpatialFrequencyResponse) EncoderName() string {
|
||||
return "CodecA20CSpatialFrequencyResponse"
|
||||
}
|
||||
|
||||
func (sfr TagA20CSpatialFrequencyResponse) String() string {
|
||||
return fmt.Sprintf("CodecA20CSpatialFrequencyResponse<COLUMNS=(%d) ROWS=(%d)>", sfr.Columns, sfr.Rows)
|
||||
}
|
||||
|
||||
type CodecA20CSpatialFrequencyResponse struct {
|
||||
}
|
||||
|
||||
func (CodecA20CSpatialFrequencyResponse) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test.
|
||||
|
||||
sfr, ok := value.(TagA20CSpatialFrequencyResponse)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a TagA20CSpatialFrequencyResponse")
|
||||
}
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
|
||||
err = binary.Write(b, byteOrder, sfr.Columns)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = binary.Write(b, byteOrder, sfr.Rows)
|
||||
log.PanicIf(err)
|
||||
|
||||
// Write columns.
|
||||
|
||||
for _, name := range sfr.ColumnNames {
|
||||
_, err := b.WriteString(name)
|
||||
log.PanicIf(err)
|
||||
|
||||
err = b.WriteByte(0)
|
||||
log.PanicIf(err)
|
||||
}
|
||||
|
||||
// Write values.
|
||||
|
||||
ve := exifcommon.NewValueEncoder(byteOrder)
|
||||
|
||||
ed, err := ve.Encode(sfr.Values)
|
||||
log.PanicIf(err)
|
||||
|
||||
_, err = b.Write(ed.Encoded)
|
||||
log.PanicIf(err)
|
||||
|
||||
encoded = b.Bytes()
|
||||
|
||||
// TODO(dustin): Confirm this size against the specification.
|
||||
|
||||
return encoded, uint32(len(encoded)), nil
|
||||
}
|
||||
|
||||
func (CodecA20CSpatialFrequencyResponse) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
// TODO(dustin): Add test using known good data.
|
||||
|
||||
byteOrder := valueContext.ByteOrder()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeByte)
|
||||
|
||||
valueBytes, err := valueContext.ReadBytes()
|
||||
log.PanicIf(err)
|
||||
|
||||
sfr := TagA20CSpatialFrequencyResponse{}
|
||||
|
||||
sfr.Columns = byteOrder.Uint16(valueBytes[0:2])
|
||||
sfr.Rows = byteOrder.Uint16(valueBytes[2:4])
|
||||
|
||||
columnNames := make([]string, sfr.Columns)
|
||||
|
||||
// startAt is where the current column name starts.
|
||||
startAt := 4
|
||||
|
||||
// offset is our current position.
|
||||
offset := 4
|
||||
|
||||
currentColumnNumber := uint16(0)
|
||||
|
||||
for currentColumnNumber < sfr.Columns {
|
||||
if valueBytes[offset] == 0 {
|
||||
columnName := string(valueBytes[startAt:offset])
|
||||
if len(columnName) == 0 {
|
||||
log.Panicf("SFR column (%d) has zero length", currentColumnNumber)
|
||||
}
|
||||
|
||||
columnNames[currentColumnNumber] = columnName
|
||||
currentColumnNumber++
|
||||
|
||||
offset++
|
||||
startAt = offset
|
||||
continue
|
||||
}
|
||||
|
||||
offset++
|
||||
}
|
||||
|
||||
sfr.ColumnNames = columnNames
|
||||
|
||||
rawRationalBytes := valueBytes[offset:]
|
||||
|
||||
rationalSize := exifcommon.TypeRational.Size()
|
||||
if len(rawRationalBytes)%rationalSize > 0 {
|
||||
log.Panicf("SFR rationals not aligned: (%d) %% (%d) > 0", len(rawRationalBytes), rationalSize)
|
||||
}
|
||||
|
||||
rationalCount := len(rawRationalBytes) / rationalSize
|
||||
|
||||
parser := new(exifcommon.Parser)
|
||||
|
||||
items, err := parser.ParseRationals(rawRationalBytes, uint32(rationalCount), byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
sfr.Values = items
|
||||
|
||||
return sfr, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
TagA20CSpatialFrequencyResponse{},
|
||||
CodecA20CSpatialFrequencyResponse{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0xa20c,
|
||||
CodecA20CSpatialFrequencyResponse{})
|
||||
}
|
78 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_A300_file_source.go generated vendored 100644
|
@ -0,0 +1,78 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
log "github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
type TagExifA300FileSource uint32
|
||||
|
||||
func (TagExifA300FileSource) EncoderName() string {
|
||||
return "CodecExifA300FileSource"
|
||||
}
|
||||
|
||||
func (af TagExifA300FileSource) String() string {
|
||||
return fmt.Sprintf("0x%08x", uint32(af))
|
||||
}
|
||||
|
||||
const (
|
||||
TagUndefinedType_A300_SceneType_Others TagExifA300FileSource = 0
|
||||
TagUndefinedType_A300_SceneType_ScannerOfTransparentType TagExifA300FileSource = 1
|
||||
TagUndefinedType_A300_SceneType_ScannerOfReflexType TagExifA300FileSource = 2
|
||||
TagUndefinedType_A300_SceneType_Dsc TagExifA300FileSource = 3
|
||||
)
|
||||
|
||||
type CodecExifA300FileSource struct {
|
||||
}
|
||||
|
||||
func (CodecExifA300FileSource) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
st, ok := value.(TagExifA300FileSource)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a TagExifA300FileSource")
|
||||
}
|
||||
|
||||
ve := exifcommon.NewValueEncoder(byteOrder)
|
||||
|
||||
ed, err := ve.Encode([]uint32{uint32(st)})
|
||||
log.PanicIf(err)
|
||||
|
||||
// TODO(dustin): Confirm this size against the specification. It's non-specific about what type it is, but it looks to be no more than a single integer scalar. So, we're assuming it's a LONG.
|
||||
|
||||
return ed.Encoded, 1, nil
|
||||
}
|
||||
|
||||
func (CodecExifA300FileSource) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeLong)
|
||||
|
||||
valueLongs, err := valueContext.ReadLongs()
|
||||
log.PanicIf(err)
|
||||
|
||||
return TagExifA300FileSource(valueLongs[0]), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
TagExifA300FileSource(0),
|
||||
CodecExifA300FileSource{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0xa300,
|
||||
CodecExifA300FileSource{})
|
||||
}
|
|
@ -0,0 +1,76 @@
|
|||
package exifundefined
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"encoding/binary"
|
||||
|
||||
log "github.com/dsoprea/go-logging"
|
||||
|
||||
exifcommon "github.com/dsoprea/go-exif/v2/common"
|
||||
)
|
||||
|
||||
type TagExifA301SceneType uint32
|
||||
|
||||
func (TagExifA301SceneType) EncoderName() string {
|
||||
return "CodecExifA301SceneType"
|
||||
}
|
||||
|
||||
func (st TagExifA301SceneType) String() string {
|
||||
return fmt.Sprintf("0x%08x", uint32(st))
|
||||
}
|
||||
|
||||
const (
|
||||
TagUndefinedType_A301_SceneType_DirectlyPhotographedImage TagExifA301SceneType = 1
|
||||
)
|
||||
|
||||
type CodecExifA301SceneType struct {
|
||||
}
|
||||
|
||||
func (CodecExifA301SceneType) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
st, ok := value.(TagExifA301SceneType)
|
||||
if ok == false {
|
||||
log.Panicf("can only encode a TagExif9101ComponentsConfiguration")
|
||||
}
|
||||
|
||||
ve := exifcommon.NewValueEncoder(byteOrder)
|
||||
|
||||
ed, err := ve.Encode([]uint32{uint32(st)})
|
||||
log.PanicIf(err)
|
||||
|
||||
// TODO(dustin): Confirm this size against the specification. It's non-specific about what type it is, but it looks to be no more than a single integer scalar. So, we're assuming it's a LONG.
|
||||
|
||||
return ed.Encoded, uint32(int(ed.UnitCount)), nil
|
||||
}
|
||||
|
||||
func (CodecExifA301SceneType) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
|
||||
defer func() {
|
||||
if state := recover(); state != nil {
|
||||
err = log.Wrap(state.(error))
|
||||
}
|
||||
}()
|
||||
|
||||
valueContext.SetUndefinedValueType(exifcommon.TypeLong)
|
||||
|
||||
valueLongs, err := valueContext.ReadLongs()
|
||||
log.PanicIf(err)
|
||||
|
||||
return TagExifA301SceneType(valueLongs[0]), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registerEncoder(
|
||||
TagExifA301SceneType(0),
|
||||
CodecExifA301SceneType{})
|
||||
|
||||
registerDecoder(
|
||||
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
|
||||
0xa301,
|
||||
CodecExifA301SceneType{})
|
||||
}
|
96 vendor/github.com/dsoprea/go-exif/v2/undefined/exif_A302_cfa_pattern.go generated vendored 100644
|
@ -0,0 +1,96 @@
package exifundefined

import (
	"bytes"
	"fmt"

	"encoding/binary"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
	log "github.com/dsoprea/go-logging"
)

type TagA302CfaPattern struct {
	HorizontalRepeat uint16
	VerticalRepeat   uint16
	CfaValue         []byte
}

func (TagA302CfaPattern) EncoderName() string {
	return "CodecA302CfaPattern"
}

func (cp TagA302CfaPattern) String() string {
	return fmt.Sprintf("TagA302CfaPattern<HORZ-REPEAT=(%d) VERT-REPEAT=(%d) CFA-VALUE=(%d)>", cp.HorizontalRepeat, cp.VerticalRepeat, len(cp.CfaValue))
}

type CodecA302CfaPattern struct {
}

func (CodecA302CfaPattern) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	// TODO(dustin): Add test.

	cp, ok := value.(TagA302CfaPattern)
	if ok == false {
		log.Panicf("can only encode a TagA302CfaPattern")
	}

	b := new(bytes.Buffer)

	err = binary.Write(b, byteOrder, cp.HorizontalRepeat)
	log.PanicIf(err)

	err = binary.Write(b, byteOrder, cp.VerticalRepeat)
	log.PanicIf(err)

	_, err = b.Write(cp.CfaValue)
	log.PanicIf(err)

	encoded = b.Bytes()

	// TODO(dustin): Confirm this size against the specification.

	return encoded, uint32(len(encoded)), nil
}

func (CodecA302CfaPattern) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	// TODO(dustin): Add test using known good data.

	valueContext.SetUndefinedValueType(exifcommon.TypeByte)

	valueBytes, err := valueContext.ReadBytes()
	log.PanicIf(err)

	cp := TagA302CfaPattern{}

	cp.HorizontalRepeat = valueContext.ByteOrder().Uint16(valueBytes[0:2])
	cp.VerticalRepeat = valueContext.ByteOrder().Uint16(valueBytes[2:4])

	expectedLength := int(cp.HorizontalRepeat * cp.VerticalRepeat)
	cp.CfaValue = valueBytes[4 : 4+expectedLength]

	return cp, nil
}

func init() {
	registerEncoder(
		TagA302CfaPattern{},
		CodecA302CfaPattern{})

	registerDecoder(
		exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
		0xa302,
		CodecA302CfaPattern{})
}
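For readers unfamiliar with the 0xA302 payload, the layout decoded above is two 16-bit repeat counts followed by horizontal*vertical pattern bytes. The following standalone sketch parses the same layout with explicit bounds checks; it does not use the go-exif API, and the parseCfaPattern helper plus the sample RGGB payload are illustrative assumptions only.

// cfa_sketch.go -- illustrative sketch, not part of this diff.
package main

import (
	"encoding/binary"
	"fmt"
)

// parseCfaPattern mirrors the byte layout decoded above: two uint16 repeat
// counts in the tag's byte order, followed by horizontal*vertical CFA
// component bytes.
func parseCfaPattern(payload []byte, order binary.ByteOrder) (h, v uint16, cfa []byte, err error) {
	if len(payload) < 4 {
		return 0, 0, nil, fmt.Errorf("payload too short: %d bytes", len(payload))
	}

	h = order.Uint16(payload[0:2])
	v = order.Uint16(payload[2:4])

	n := int(h) * int(v)
	if len(payload) < 4+n {
		return 0, 0, nil, fmt.Errorf("expected %d CFA bytes, have %d", n, len(payload)-4)
	}

	return h, v, payload[4 : 4+n], nil
}

func main() {
	// A hypothetical 2x2 pattern: repeats (2, 2) followed by four component IDs.
	payload := []byte{0x00, 0x02, 0x00, 0x02, 0x00, 0x01, 0x01, 0x02}

	h, v, cfa, err := parseCfaPattern(payload, binary.BigEndian)
	if err != nil {
		panic(err)
	}
	fmt.Printf("repeat=%dx%d cfa=%v\n", h, v, cfa)
}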
vendor/github.com/dsoprea/go-exif/v2/undefined/exif_iop_0002_interop_version.go generated vendored (new file, mode 100644, 69 lines)
@@ -0,0 +1,69 @@
package exifundefined

import (
	"encoding/binary"

	log "github.com/dsoprea/go-logging"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

type Tag0002InteropVersion struct {
	InteropVersion string
}

func (Tag0002InteropVersion) EncoderName() string {
	return "Codec0002InteropVersion"
}

func (iv Tag0002InteropVersion) String() string {
	return iv.InteropVersion
}

type Codec0002InteropVersion struct {
}

func (Codec0002InteropVersion) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	s, ok := value.(Tag0002InteropVersion)
	if ok == false {
		log.Panicf("can only encode a Tag0002InteropVersion")
	}

	return []byte(s.InteropVersion), uint32(len(s.InteropVersion)), nil
}

func (Codec0002InteropVersion) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	valueContext.SetUndefinedValueType(exifcommon.TypeAsciiNoNul)

	valueString, err := valueContext.ReadAsciiNoNul()
	log.PanicIf(err)

	iv := Tag0002InteropVersion{
		InteropVersion: valueString,
	}

	return iv, nil
}

func init() {
	registerEncoder(
		Tag0002InteropVersion{},
		Codec0002InteropVersion{})

	registerDecoder(
		exifcommon.IfdExifIopStandardIfdIdentity.UnindexedString(),
		0x0002,
		Codec0002InteropVersion{})
}
vendor/github.com/dsoprea/go-exif/v2/undefined/gps_001B_gps_processing_method.go generated vendored (new file, mode 100644, 65 lines)
@@ -0,0 +1,65 @@
package exifundefined

import (
	"encoding/binary"

	log "github.com/dsoprea/go-logging"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

type Tag001BGPSProcessingMethod struct {
	string
}

func (Tag001BGPSProcessingMethod) EncoderName() string {
	return "Codec001BGPSProcessingMethod"
}

func (gpm Tag001BGPSProcessingMethod) String() string {
	return gpm.string
}

type Codec001BGPSProcessingMethod struct {
}

func (Codec001BGPSProcessingMethod) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	s, ok := value.(Tag001BGPSProcessingMethod)
	if ok == false {
		log.Panicf("can only encode a Tag001BGPSProcessingMethod")
	}

	return []byte(s.string), uint32(len(s.string)), nil
}

func (Codec001BGPSProcessingMethod) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	valueContext.SetUndefinedValueType(exifcommon.TypeAsciiNoNul)

	valueString, err := valueContext.ReadAsciiNoNul()
	log.PanicIf(err)

	return Tag001BGPSProcessingMethod{valueString}, nil
}

func init() {
	registerEncoder(
		Tag001BGPSProcessingMethod{},
		Codec001BGPSProcessingMethod{})

	registerDecoder(
		exifcommon.IfdGpsInfoStandardIfdIdentity.UnindexedString(),
		0x001b,
		Codec001BGPSProcessingMethod{})
}
vendor/github.com/dsoprea/go-exif/v2/undefined/gps_001C_gps_area_information.go generated vendored (new file, mode 100644, 65 lines)
@@ -0,0 +1,65 @@
package exifundefined

import (
	"encoding/binary"

	log "github.com/dsoprea/go-logging"

	exifcommon "github.com/dsoprea/go-exif/v2/common"
)

type Tag001CGPSAreaInformation struct {
	string
}

func (Tag001CGPSAreaInformation) EncoderName() string {
	return "Codec001CGPSAreaInformation"
}

func (gai Tag001CGPSAreaInformation) String() string {
	return gai.string
}

type Codec001CGPSAreaInformation struct {
}

func (Codec001CGPSAreaInformation) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	s, ok := value.(Tag001CGPSAreaInformation)
	if ok == false {
		log.Panicf("can only encode a Tag001CGPSAreaInformation")
	}

	return []byte(s.string), uint32(len(s.string)), nil
}

func (Codec001CGPSAreaInformation) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
	defer func() {
		if state := recover(); state != nil {
			err = log.Wrap(state.(error))
		}
	}()

	valueContext.SetUndefinedValueType(exifcommon.TypeAsciiNoNul)

	valueString, err := valueContext.ReadAsciiNoNul()
	log.PanicIf(err)

	return Tag001CGPSAreaInformation{valueString}, nil
}

func init() {
	registerEncoder(
		Tag001CGPSAreaInformation{},
		Codec001CGPSAreaInformation{})

	registerDecoder(
		exifcommon.IfdGpsInfoStandardIfdIdentity.UnindexedString(),
		0x001c,
		Codec001CGPSAreaInformation{})
}
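The interoperability-version tag and the two GPS tags above all wrap a plain ASCII payload with no terminating NUL: Encode emits the raw bytes of the string, and Decode reads them back with ReadAsciiNoNul. A minimal standalone round-trip of that convention might look like the following; encodeAsciiNoNul and decodeAsciiNoNul are illustrative helpers, not functions from the library.

// ascii_nonul_sketch.go -- illustrative sketch, not part of this diff.
package main

import "fmt"

// encodeAsciiNoNul returns the raw bytes of s and the unit count the tag
// would carry (one unit per byte, with no trailing NUL).
func encodeAsciiNoNul(s string) ([]byte, uint32) {
	return []byte(s), uint32(len(s))
}

// decodeAsciiNoNul interprets every byte of the payload as ASCII text.
func decodeAsciiNoNul(raw []byte) string {
	return string(raw)
}

func main() {
	encoded, count := encodeAsciiNoNul("0100") // e.g. an InteropVersion-style value
	fmt.Println(count, decodeAsciiNoNul(encoded)) // prints: 4 0100
}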
Some files were not shown because too many files have changed in this diff.