add support for map/slice conversions, tests

master
Felix Lange 8 years ago
parent e94794fd37
commit 5b3e75f776
  1. 300
      genmethod.go
  2. 7
      internal/clasherrors/clasherrors.go
  3. 7
      internal/clashjson/foo.go
  4. 19
      internal/tests/mapconv/input.go
  5. 72
      internal/tests/mapconv/output.go
  6. 34
      internal/tests/nameclash/input.go
  7. 112
      internal/tests/nameclash/output.go
  8. 17
      internal/tests/sliceconv/input.go
  9. 72
      internal/tests/sliceconv/output.go
  10. 500
      main.go
  11. 46
      main_test.go
  12. 244
      typeutil.go
  13. 78
      vendor/github.com/garslo/gogen/README.md
  14. 19
      vendor/github.com/garslo/gogen/assign.go
  15. 84
      vendor/github.com/garslo/gogen/binary_op.go
  16. 25
      vendor/github.com/garslo/gogen/builtins.go
  17. 7
      vendor/github.com/garslo/gogen/declaration.go
  18. 35
      vendor/github.com/garslo/gogen/declare.go
  19. 19
      vendor/github.com/garslo/gogen/declare_and_assign.go
  20. 17
      vendor/github.com/garslo/gogen/dotted.go
  21. 7
      vendor/github.com/garslo/gogen/expression.go
  22. 40
      vendor/github.com/garslo/gogen/for.go
  23. 127
      vendor/github.com/garslo/gogen/function.go
  24. 37
      vendor/github.com/garslo/gogen/function_call.go
  25. 29
      vendor/github.com/garslo/gogen/if.go
  26. 32
      vendor/github.com/garslo/gogen/import.go
  27. 36
      vendor/github.com/garslo/gogen/package.go
  28. 46
      vendor/github.com/garslo/gogen/range.go
  29. 17
      vendor/github.com/garslo/gogen/return.go
  30. 13
      vendor/github.com/garslo/gogen/star.go
  31. 7
      vendor/github.com/garslo/gogen/statement.go
  32. 141
      vendor/github.com/garslo/gogen/stdlib_imports.go
  33. 115
      vendor/github.com/garslo/gogen/struct.go
  34. 21
      vendor/github.com/garslo/gogen/thunk.go
  35. 9
      vendor/github.com/garslo/gogen/type.go
  36. 6
      vendor/github.com/garslo/gogen/typenames.go
  37. 61
      vendor/github.com/garslo/gogen/unary_op.go
  38. 44
      vendor/github.com/garslo/gogen/var.go
  39. 202
      vendor/github.com/kylelemons/godebug/LICENSE
  40. 186
      vendor/github.com/kylelemons/godebug/diff/diff.go
  41. 27
      vendor/golang.org/x/tools/LICENSE
  42. 22
      vendor/golang.org/x/tools/PATENTS
  43. 627
      vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
  44. 450
      vendor/golang.org/x/tools/go/ast/astutil/imports.go
  45. 14
      vendor/golang.org/x/tools/go/ast/astutil/util.go
  46. 172
      vendor/golang.org/x/tools/imports/fastwalk.go
  47. 13
      vendor/golang.org/x/tools/imports/fastwalk_dirent_fileno.go
  48. 13
      vendor/golang.org/x/tools/imports/fastwalk_dirent_ino.go
  49. 29
      vendor/golang.org/x/tools/imports/fastwalk_portable.go
  50. 122
      vendor/golang.org/x/tools/imports/fastwalk_unix.go
  51. 978
      vendor/golang.org/x/tools/imports/fix.go
  52. 289
      vendor/golang.org/x/tools/imports/imports.go
  53. 212
      vendor/golang.org/x/tools/imports/sortimports.go
  54. 9289
      vendor/golang.org/x/tools/imports/zstdlib.go
  55. 32
      vendor/vendor.json

@ -0,0 +1,300 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
package main
import (
"fmt"
"go/ast"
"go/printer"
"go/token"
"go/types"
"io"
"strconv"
"strings"
. "github.com/garslo/gogen"
)
// NIL is the expression for the predeclared nil identifier.
var NIL = Name("nil")
// marshalMethod carries the per-method generation state: the type being
// marshaled and a function-level identifier scope for fresh names.
type marshalMethod struct {
mtyp *marshalerType
scope *funcScope
}
// newMarshalMethod returns a generator for methods of mtyp, equipped
// with a fresh function-level identifier scope derived from the file scope.
func newMarshalMethod(mtyp *marshalerType) *marshalMethod {
	method := &marshalMethod{mtyp: mtyp, scope: newFuncScope(mtyp.scope)}
	return method
}
// writeFunction prints fn as formatted Go source to w, followed by a
// blank line. Print errors are ignored (best-effort output).
func writeFunction(w io.Writer, fs *token.FileSet, fn Function) {
	decl := fn.Declaration()
	printer.Fprint(w, fs, decl)
	fmt.Fprintln(w)
}
// genUnmarshalJSON generates the UnmarshalJSON method.
func genUnmarshalJSON(mtyp *marshalerType) Function {
var (
m = newMarshalMethod(mtyp)
recv = m.receiver()
input = Name(m.scope.newIdent("input"))
intertyp = m.intermediateType(m.scope.newIdent(m.mtyp.orig.Obj().Name() + "JSON"))
dec = Name(m.scope.newIdent("dec"))
conv = Name(m.scope.newIdent("x"))
json = Name(m.scope.parent.packageName("encoding/json"))
)
fn := Function{
Receiver: recv,
Name: "UnmarshalJSON",
ReturnTypes: Types{{TypeName: "error"}},
Parameters: Types{{Name: input.Name, TypeName: "[]byte"}},
Body: []Statement{
declStmt{intertyp},
Declare{Name: dec.Name, TypeName: intertyp.Name},
errCheck(CallFunction{
Func: Dotted{Receiver: json, Name: "Unmarshal"},
Params: []Expression{input, AddressOf{Value: dec}},
}),
Declare{Name: conv.Name, TypeName: m.mtyp.name},
},
}
fn.Body = append(fn.Body, m.unmarshalConversions(dec, conv, "json")...)
fn.Body = append(fn.Body, Assign{Lhs: Star{Value: Name(recv.Name)}, Rhs: conv})
fn.Body = append(fn.Body, Return{Values: []Expression{Name("nil")}})
return fn
}
// genMarshalJSON generates the MarshalJSON method.
func genMarshalJSON(mtyp *marshalerType) Function {
var (
m = newMarshalMethod(mtyp)
recv = m.receiver()
intertyp = m.intermediateType(m.scope.newIdent(m.mtyp.orig.Obj().Name() + "JSON"))
enc = Name(m.scope.newIdent("enc"))
json = Name(m.scope.parent.packageName("encoding/json"))
)
fn := Function{
Receiver: recv,
Name: "MarshalJSON",
ReturnTypes: Types{{TypeName: "[]byte"}, {TypeName: "error"}},
Body: []Statement{
declStmt{intertyp},
Declare{Name: enc.Name, TypeName: intertyp.Name},
},
}
fn.Body = append(fn.Body, m.marshalConversions(Name(recv.Name), enc, "json")...)
fn.Body = append(fn.Body, Return{Values: []Expression{
CallFunction{
Func: Dotted{Receiver: json, Name: "Marshal"},
Params: []Expression{AddressOf{Value: enc}},
},
}})
return fn
}
// genUnmarshalYAML generates the UnmarshalYAML method.
func genUnmarshalYAML(mtyp *marshalerType) Function {
var (
m = newMarshalMethod(mtyp)
recv = m.receiver()
unmarshal = Name(m.scope.newIdent("unmarshal"))
intertyp = m.intermediateType(m.scope.newIdent(m.mtyp.orig.Obj().Name() + "YAML"))
dec = Name(m.scope.newIdent("dec"))
conv = Name(m.scope.newIdent("x"))
)
fn := Function{
Receiver: recv,
Name: "UnmarshalYAML",
ReturnTypes: Types{{TypeName: "error"}},
Parameters: Types{{Name: unmarshal.Name, TypeName: "func (interface{}) error"}},
Body: []Statement{
declStmt{intertyp},
Declare{Name: dec.Name, TypeName: intertyp.Name},
errCheck(CallFunction{Func: unmarshal, Params: []Expression{AddressOf{Value: dec}}}),
Declare{Name: conv.Name, TypeName: m.mtyp.name},
},
}
fn.Body = append(fn.Body, m.unmarshalConversions(dec, conv, "yaml")...)
fn.Body = append(fn.Body, Assign{Lhs: Star{Value: Name(recv.Name)}, Rhs: conv})
fn.Body = append(fn.Body, Return{Values: []Expression{Name("nil")}})
return fn
}
// genMarshalYAML generates the MarshalYAML method.
func genMarshalYAML(mtyp *marshalerType) Function {
var (
m = newMarshalMethod(mtyp)
recv = m.receiver()
intertyp = m.intermediateType(m.scope.newIdent(m.mtyp.orig.Obj().Name() + "YAML"))
enc = Name(m.scope.newIdent("enc"))
)
fn := Function{
Receiver: recv,
Name: "MarshalYAML",
ReturnTypes: Types{{TypeName: "interface{}"}, {TypeName: "error"}},
Body: []Statement{
declStmt{intertyp},
Declare{Name: enc.Name, TypeName: intertyp.Name},
},
}
fn.Body = append(fn.Body, m.marshalConversions(Name(recv.Name), enc, "yaml")...)
fn.Body = append(fn.Body, Return{Values: []Expression{AddressOf{Value: enc}}})
return fn
}
// receiver builds the method receiver: a pointer to the marshaled type,
// named after the lower-cased first letter of the type name (made
// unique via the method scope).
func (m *marshalMethod) receiver() Receiver {
	initial := strings.ToLower(m.mtyp.name[:1])
	r := Receiver{Name: m.scope.newIdent(initial)}
	r.Type = Star{Value: Name(m.mtyp.name)}
	return r
}
// intermediateType builds the struct type that is actually encoded or
// decoded in place of the original type. Field types come from f.typ
// (possibly replaced via the field-override struct and wrapped in a
// pointer by ensurePointer); the struct tags are carried over verbatim.
func (m *marshalMethod) intermediateType(name string) Struct {
s := Struct{Name: name}
for _, f := range m.mtyp.Fields {
s.Fields = append(s.Fields, Field{
Name: f.name,
// Qualify package names through the file scope so renamed
// imports are referenced correctly.
TypeName: types.TypeString(f.typ, m.mtyp.scope.qualify),
Tag: f.tag,
})
}
return s
}
// unmarshalConversions generates the per-field statements that copy the
// decoded intermediate value 'from' into the original-typed value 'to'.
//
// Optional fields (per the format's struct tag) are converted only when
// the decoded pointer is non-nil. Required fields first emit a nil
// check that returns a "missing required field" error, then convert
// unconditionally.
func (m *marshalMethod) unmarshalConversions(from, to Var, format string) (s []Statement) {
for _, f := range m.mtyp.Fields {
accessFrom := Dotted{Receiver: from, Name: f.name}
accessTo := Dotted{Receiver: to, Name: f.name}
if f.isOptional(format) {
s = append(s, If{
Condition: NotEqual{Lhs: accessFrom, Rhs: NIL},
Body: m.convert(accessFrom, accessTo, f.typ, f.origTyp),
})
} else {
// The error message names the field as it appears in the
// encoded form (honoring tag renames), not the Go name.
err := fmt.Sprintf("missing required field %s for %s", f.encodedName(format), m.mtyp.name)
errors := m.scope.parent.packageName("errors")
s = append(s, If{
Condition: Equals{Lhs: accessFrom, Rhs: NIL},
Body: []Statement{
Return{
Values: []Expression{
CallFunction{
Func: Dotted{Receiver: Name(errors), Name: "New"},
Params: []Expression{stringLit{err}},
},
},
},
},
})
s = append(s, m.convert(accessFrom, accessTo, f.typ, f.origTyp)...)
}
}
return s
}
// marshalConversions generates statements that copy each field of the
// original value 'from' into the intermediate value 'to', converting
// types as needed. The format tag is accepted for symmetry with
// unmarshalConversions but is not consulted here.
func (m *marshalMethod) marshalConversions(from, to Var, format string) []Statement {
	var stmts []Statement
	for _, field := range m.mtyp.Fields {
		src := Dotted{Receiver: from, Name: field.name}
		dst := Dotted{Receiver: to, Name: field.name}
		stmts = append(stmts, m.convert(src, dst, field.origTyp, field.typ)...)
	}
	return stmts
}
// convert generates statements assigning 'from' (of type fromtyp) to
// 'to' (of type totyp).
//
// Pointer mismatches introduced by ensurePointer are first removed by
// dereferencing or taking the address into a temporary. Then, in order
// of preference: a direct conversion if Go allows it; otherwise an
// element-wise loop for slices; otherwise a key/element loop for maps.
// Any other combination is a generator bug and panics via invalidConv.
func (m *marshalMethod) convert(from, to Expression, fromtyp, totyp types.Type) (s []Statement) {
// Remove pointer introduced by ensurePointer during field building.
if isPointer(fromtyp) && !isPointer(totyp) {
tmp := Name(m.scope.newIdent("v"))
s = append(s, DeclareAndAssign{Lhs: tmp, Rhs: Star{Value: from}})
from = tmp
fromtyp = fromtyp.(*types.Pointer).Elem()
} else if !isPointer(fromtyp) && isPointer(totyp) {
tmp := Name(m.scope.newIdent("v"))
s = append(s, DeclareAndAssign{Lhs: tmp, Rhs: AddressOf{Value: from}})
from = tmp
fromtyp = types.NewPointer(fromtyp)
}
// Generate the conversion.
qf := m.mtyp.scope.qualify
switch {
// Checked first: directly convertible slices/maps need no loop.
case types.ConvertibleTo(fromtyp, totyp):
s = append(s, Assign{Lhs: to, Rhs: simpleConv(from, fromtyp, totyp, qf)})
case isSlice(fromtyp):
fromElem := fromtyp.(*types.Slice).Elem()
toElem := totyp.(*types.Slice).Elem()
key := Name(m.scope.newIdent("i"))
// Allocate the destination with the source length, then copy
// element by element with a per-element conversion.
s = append(s, Assign{Lhs: to, Rhs: makeExpr(totyp, from, qf)})
s = append(s, Range{
Key: key, RangeValue: from,
Body: []Statement{Assign{
Lhs: Index{Value: to, Index: key},
Rhs: simpleConv(Index{Value: from, Index: key}, fromElem, toElem, qf),
}},
})
case isMap(fromtyp):
fromKey, fromElem := fromtyp.(*types.Map).Key(), fromtyp.(*types.Map).Elem()
toKey, toElem := totyp.(*types.Map).Key(), totyp.(*types.Map).Elem()
key := Name(m.scope.newIdent("key"))
// Both keys and elements may require conversion.
s = append(s, Assign{Lhs: to, Rhs: makeExpr(totyp, from, qf)})
s = append(s, Range{
Key: key, RangeValue: from,
Body: []Statement{Assign{
Lhs: Index{Value: to, Index: simpleConv(key, fromKey, toKey, qf)},
Rhs: simpleConv(Index{Value: from, Index: key}, fromElem, toElem, qf),
}},
})
default:
invalidConv(fromtyp, totyp, qf)
}
return s
}
// simpleConv returns an expression converting 'from' (of type fromtyp)
// to totyp. Assignable types are passed through unchanged; otherwise a
// call-style conversion totyp(from) is emitted. Impossible conversions
// panic via invalidConv (a generator bug).
func simpleConv(from Expression, fromtyp, totyp types.Type, qf types.Qualifier) Expression {
	if types.AssignableTo(fromtyp, totyp) {
		return from
	}
	if !types.ConvertibleTo(fromtyp, totyp) {
		invalidConv(fromtyp, totyp, qf)
	}
	name := types.TypeString(totyp, qf)
	if isPointer(totyp) {
		// Pointer target types must be parenthesized so the result
		// parses as a conversion, e.g. (*T)(x). Hack alert!
		name = "(" + name + ")"
	}
	return CallFunction{Func: Name(name), Params: []Expression{from}}
}
func invalidConv(from, to types.Type, qf types.Qualifier) {
panic(fmt.Errorf("BUG: invalid conversion %s -> %s", types.TypeString(from, qf), types.TypeString(to, qf)))
}
// makeExpr returns the call expression make(typ, len(lenfrom)), used to
// allocate the destination slice or map before an element-wise copy.
func makeExpr(typ types.Type, lenfrom Expression, qf types.Qualifier) Expression {
	lenCall := CallFunction{Func: Name("len"), Params: []Expression{lenfrom}}
	return CallFunction{
		Func:   Name("make"),
		Params: []Expression{Name(types.TypeString(typ, qf)), lenCall},
	}
}
// errCheck returns the statement
//
//	if err := expr; err != nil {
//		return err
//	}
//
// which propagates errors from the given call expression.
func errCheck(expr Expression) If {
	err := Name("err")
	return If{
		Init: DeclareAndAssign{Lhs: err, Rhs: expr},
		// BUG fix: the comparison must be err != nil. The previous
		// Equals condition inverted the check, so generated code
		// returned the nil error on success and silently continued
		// after real failures (visible as "err == nil" in the
		// generated test fixtures).
		Condition: NotEqual{Lhs: err, Rhs: NIL},
		Body:      []Statement{Return{Values: []Expression{err}}},
	}
}
type stringLit struct {
V string
}
func (l stringLit) Expression() ast.Expr {
return &ast.BasicLit{Kind: token.STRING, Value: strconv.Quote(l.V)}
}
// declStmt adapts a Declaration so it can be used where a Statement is
// expected, e.g. to declare the intermediate struct type inside a
// generated method body.
type declStmt struct {
d Declaration
}
// Statement wraps the declaration in an *ast.DeclStmt node.
func (ds declStmt) Statement() ast.Stmt {
return &ast.DeclStmt{Decl: ds.d.Declaration()}
}

@ -0,0 +1,7 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
package errors
type Foo struct{}

@ -0,0 +1,7 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
package json
type Foo struct{}

@ -0,0 +1,19 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
//go:generate gencodec -type X -field-override Xo -out output.go
package mapconv
// replacedString and replacedInt stand in for the key and element types
// used by the field override below.
type replacedString string
type replacedInt int
// X is the input type; its map field's type is overridden for
// marshaling, exercising map key/element conversions.
type X struct {
M map[string]int
}
// Xo overrides the type of X.M in the generated code.
type Xo struct {
M map[replacedString]replacedInt
}

@ -0,0 +1,72 @@
// generated by gencodec, do not edit.
package mapconv
import (
"encoding/json"
"errors"
)
// MarshalJSON encodes x, converting the map field to the override
// key/element types.
func (x *X) MarshalJSON() ([]byte, error) {
	type XJSON struct {
		M map[replacedString]replacedInt
	}
	var enc XJSON
	enc.M = make(map[replacedString]replacedInt, len(x.M))
	for key := range x.M {
		enc.M[replacedString(key)] = replacedInt(x.M[key])
	}
	return json.Marshal(&enc)
}
// UnmarshalJSON decodes input into x. The map field is required and is
// converted back from the override key/element types.
func (x *X) UnmarshalJSON(input []byte) error {
	type XJSON struct {
		M map[replacedString]replacedInt
	}
	var dec XJSON
	// BUG fix: return the error when decoding fails (err != nil); the
	// previous inverted check returned nil on failure and aborted with
	// a nil error on success.
	if err := json.Unmarshal(input, &dec); err != nil {
		return err
	}
	var x0 X
	if dec.M == nil {
		return errors.New("missing required field m for X")
	}
	x0.M = make(map[string]int, len(dec.M))
	for key := range dec.M {
		x0.M[string(key)] = int(dec.M[key])
	}
	*x = x0
	return nil
}
// MarshalYAML encodes x for the YAML encoder, converting the map field
// to the override key/element types.
func (x *X) MarshalYAML() (interface{}, error) {
	type XYAML struct {
		M map[replacedString]replacedInt
	}
	var enc XYAML
	enc.M = make(map[replacedString]replacedInt, len(x.M))
	for key := range x.M {
		enc.M[replacedString(key)] = replacedInt(x.M[key])
	}
	// BUG fix: the signature has two results; "return &enc" alone does
	// not compile. Return a nil error alongside the encoded value.
	return &enc, nil
}
// UnmarshalYAML decodes into x using the provided unmarshal callback.
// The map field is required and converted back from the override types.
func (x *X) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type XYAML struct {
		M map[replacedString]replacedInt
	}
	var dec XYAML
	// BUG fix: propagate the error when unmarshal fails (err != nil);
	// the inverted check returned nil on failure and aborted on success.
	if err := unmarshal(&dec); err != nil {
		return err
	}
	var x0 X
	if dec.M == nil {
		return errors.New("missing required field m for X")
	}
	x0.M = make(map[string]int, len(dec.M))
	for key := range dec.M {
		x0.M[string(key)] = int(dec.M[key])
	}
	*x = x0
	return nil
}

@ -0,0 +1,34 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
//go:generate gencodec -type Y -field-override Yo -out output.go
package nameclash
import (
errors "github.com/fjl/gencodec/internal/clasherrors"
json "github.com/fjl/gencodec/internal/clashjson"
)
// This one clashes with the generated intermediate type name.
type YJSON struct{}
// This type clashes with a name in the override struct.
type enc int
// These types clash with variables, but are ignored because they're
// not referenced by the struct.
type input struct{}
type dec struct{}
// Y is the input type; every field is optional, and the field types
// deliberately reference the clash-inducing packages and local types.
type Y struct {
Foo json.Foo `optional:"true"`
Bar errors.Foo `optional:"true"`
Gazonk YJSON `optional:"true"`
Over int `optional:"true"`
}
// Yo overrides the type of Y.Over with the clashing local type enc.
type Yo struct {
Over enc
}

@ -0,0 +1,112 @@
// generated by gencodec, do not edit.
package nameclash
import (
"encoding/json"
errors0 "github.com/fjl/gencodec/internal/clasherrors"
json0 "github.com/fjl/gencodec/internal/clashjson"
)
// MarshalJSON encodes y via the intermediate type YJSON0 (suffixed to
// avoid clashing with the local YJSON type). Optional fields are
// wrapped in pointers; temporaries v, v0... come from the renamed
// identifier scope.
func (y *Y) MarshalJSON() ([]byte, error) {
type YJSON0 struct {
Foo *json0.Foo `optional:"true"`
Bar *errors0.Foo `optional:"true"`
Gazonk *YJSON `optional:"true"`
Over *enc `optional:"true"`
}
var enc0 YJSON0
v := &y.Foo
enc0.Foo = v
v0 := &y.Bar
enc0.Bar = v0
v1 := &y.Gazonk
enc0.Gazonk = v1
v2 := &y.Over
enc0.Over = (*enc)(v2)
return json.Marshal(&enc0)
}
// UnmarshalJSON decodes input into y. All fields are optional; only
// non-nil pointers in the intermediate struct are copied back.
func (y *Y) UnmarshalJSON(input []byte) error {
	type YJSON0 struct {
		Foo    *json0.Foo   `optional:"true"`
		Bar    *errors0.Foo `optional:"true"`
		Gazonk *YJSON       `optional:"true"`
		Over   *enc         `optional:"true"`
	}
	var dec YJSON0
	// BUG fix: return the error when decoding fails (err != nil).
	if err := json.Unmarshal(input, &dec); err != nil {
		return err
	}
	var x Y
	if dec.Foo != nil {
		v := *dec.Foo
		x.Foo = v
	}
	if dec.Bar != nil {
		v0 := *dec.Bar
		x.Bar = v0
	}
	if dec.Gazonk != nil {
		v1 := *dec.Gazonk
		x.Gazonk = v1
	}
	if dec.Over != nil {
		v2 := *dec.Over
		x.Over = int(v2)
	}
	*y = x
	return nil
}
// MarshalYAML encodes y for the YAML encoder via the intermediate type.
func (y *Y) MarshalYAML() (interface{}, error) {
	type YYAML struct {
		Foo    *json0.Foo   `optional:"true"`
		Bar    *errors0.Foo `optional:"true"`
		Gazonk *YJSON       `optional:"true"`
		Over   *enc         `optional:"true"`
	}
	var enc0 YYAML
	v := &y.Foo
	enc0.Foo = v
	v0 := &y.Bar
	enc0.Bar = v0
	v1 := &y.Gazonk
	enc0.Gazonk = v1
	v2 := &y.Over
	enc0.Over = (*enc)(v2)
	// BUG fix: the signature has two results; return a nil error
	// alongside the value ("return &enc0" alone does not compile).
	return &enc0, nil
}
// UnmarshalYAML decodes into y using the provided unmarshal callback.
// All fields are optional; only non-nil pointers are copied back.
func (y *Y) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type YYAML struct {
		Foo    *json0.Foo   `optional:"true"`
		Bar    *errors0.Foo `optional:"true"`
		Gazonk *YJSON       `optional:"true"`
		Over   *enc         `optional:"true"`
	}
	var dec YYAML
	// BUG fix: propagate the error when unmarshal fails (err != nil).
	if err := unmarshal(&dec); err != nil {
		return err
	}
	var x Y
	if dec.Foo != nil {
		v := *dec.Foo
		x.Foo = v
	}
	if dec.Bar != nil {
		v0 := *dec.Bar
		x.Bar = v0
	}
	if dec.Gazonk != nil {
		v1 := *dec.Gazonk
		x.Gazonk = v1
	}
	if dec.Over != nil {
		v2 := *dec.Over
		x.Over = int(v2)
	}
	*y = x
	return nil
}

@ -0,0 +1,17 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
//go:generate gencodec -type X -field-override Xo -out output.go
package sliceconv
// replacedInt stands in for the element type used by the override.
type replacedInt int
// X is the input type; its slice field's element type is overridden,
// exercising slice element conversions.
type X struct {
S []int
}
// Xo overrides the type of X.S in the generated code.
type Xo struct {
S []replacedInt
}

@ -0,0 +1,72 @@
// generated by gencodec, do not edit.
package sliceconv
import (
"encoding/json"
"errors"
)
// MarshalJSON encodes x, converting the slice field's elements to the
// override element type.
func (x *X) MarshalJSON() ([]byte, error) {
	type XJSON struct {
		S []replacedInt
	}
	var enc XJSON
	enc.S = make([]replacedInt, len(x.S))
	for i := range x.S {
		enc.S[i] = replacedInt(x.S[i])
	}
	return json.Marshal(&enc)
}
// UnmarshalJSON decodes input into x. The slice field is required and
// its elements are converted back from the override element type.
func (x *X) UnmarshalJSON(input []byte) error {
	type XJSON struct {
		S []replacedInt
	}
	var dec XJSON
	// BUG fix: return the error when decoding fails (err != nil).
	if err := json.Unmarshal(input, &dec); err != nil {
		return err
	}
	var x0 X
	if dec.S == nil {
		return errors.New("missing required field s for X")
	}
	x0.S = make([]int, len(dec.S))
	for i := range dec.S {
		x0.S[i] = int(dec.S[i])
	}
	*x = x0
	return nil
}
// MarshalYAML encodes x for the YAML encoder, converting the slice
// field's elements to the override element type.
func (x *X) MarshalYAML() (interface{}, error) {
	type XYAML struct {
		S []replacedInt
	}
	var enc XYAML
	enc.S = make([]replacedInt, len(x.S))
	for i := range x.S {
		enc.S[i] = replacedInt(x.S[i])
	}
	// BUG fix: the signature has two results; return a nil error
	// alongside the value ("return &enc" alone does not compile).
	return &enc, nil
}
// UnmarshalYAML decodes into x using the provided unmarshal callback.
// The slice field is required; elements are converted back.
func (x *X) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type XYAML struct {
		S []replacedInt
	}
	var dec XYAML
	// BUG fix: propagate the error when unmarshal fails (err != nil).
	if err := unmarshal(&dec); err != nil {
		return err
	}
	var x0 X
	if dec.S == nil {
		return errors.New("missing required field s for X")
	}
	x0.S = make([]int, len(dec.S))
	for i := range dec.S {
		x0.S[i] = int(dec.S[i])
	}
	*x = x0
	return nil
}

@ -48,12 +48,59 @@ will be used to parse the value of SpecialField.
SpecialField specialString // overrides type of SpecialField when marshaling/unmarshaling
}
Relaxed Field Conversions
Field types in the override struct must be trivially convertible to the original field
type. gencodec's definition of 'convertible' is less restrictive than the usual rules
defined in the Go language specification.
The following conversions are supported:
If the fields are directly assignable, no conversion is generated.
If the fields are convertible according to Go language rules, a simple conversion is
emitted. Example input code:
type specialString string
func (s *specialString) UnmarshalJSON(input []byte) error { ... }
type Foo struct{ S string }
type fooMarshaling struct{ S specialString }
The generated code will contain:
func (f *Foo) UnmarshalJSON(input []byte) error {
var dec struct{ S *specialString }
...
f.S = string(dec.S)
...
}
If the fields are of map or slice type and the element (and key) types are convertible, a
simple loop is emitted. Example input code:
type Foo2 struct{ M map[string]string }
type foo2Marshaling struct{ S map[string]specialString }
The generated code is similar to this snippet:
func (f *Foo2) UnmarshalJSON(input []byte) error {
var dec struct{ M map[string]specialString }
...
for key, _ := range dec.M {
f.M[key] = string(dec.M[key])
}
...
}
*/
package main
import (
"bytes"
"errors"
"flag"
"fmt"
"go/ast"
@ -64,9 +111,7 @@ import (
"io/ioutil"
"os"
"reflect"
"strconv"
"strings"
"text/template"
"golang.org/x/tools/imports"
)
@ -80,9 +125,11 @@ func main() {
)
flag.Parse()
fs := token.NewFileSet()
pkg := loadPackage(fs, *pkgdir)
code := makeMarshalingCode(fs, pkg, *typename, *overrides)
cfg := Config{Dir: *pkgdir, Type: *typename, FieldOverride: *overrides}
code, err := cfg.process()
if err != nil {
fatal(err)
}
if *output == "-" {
os.Stdout.Write(code)
} else if err := ioutil.WriteFile(*output, code, 0644); err != nil {
@ -90,14 +137,67 @@ func main() {
}
}
func loadPackage(fs *token.FileSet, dir string) *types.Package {
// fatal prints its arguments to stderr and terminates the process with
// a non-zero exit status. It never returns.
func fatal(args ...interface{}) {
fmt.Fprintln(os.Stderr, args...)
os.Exit(1)
}
// Config holds the parameters for one code-generation run.
type Config struct {
Dir string // input package directory
Type string // type to generate methods for
FieldOverride string // name of struct type for field overrides
Importer types.Importer // optional; process defaults it to importer.Default()
FileSet *token.FileSet // optional; process defaults it to a fresh FileSet
}
// process loads and type-checks the input package, constructs the
// intermediate marshaling type (applying any field overrides), then
// generates and formats the output source. It returns the formatted
// code or an error.
func (cfg *Config) process() (code []byte, err error) {
	if cfg.FileSet == nil {
		cfg.FileSet = token.NewFileSet()
	}
	if cfg.Importer == nil {
		cfg.Importer = importer.Default()
	}
	pkg, err := loadPackage(cfg)
	if err != nil {
		return nil, err
	}
	typ, err := lookupStructType(pkg.Scope(), cfg.Type)
	if err != nil {
		return nil, fmt.Errorf("can't find %s: %v", cfg.Type, err)
	}
	// Construct the marshaling type.
	mtyp := newMarshalerType(cfg.FileSet, cfg.Importer, typ)
	if cfg.FieldOverride != "" {
		otyp, err := lookupStructType(pkg.Scope(), cfg.FieldOverride)
		if err != nil {
			return nil, fmt.Errorf("can't find field replacement type %s: %v", cfg.FieldOverride, err)
		}
		err = mtyp.loadOverrides(cfg.FieldOverride, otyp.Underlying().(*types.Struct))
		if err != nil {
			return nil, err
		}
	}
	// Generate and format the output.
	// Use goimports to format the source because it separates imports.
	code = genPackage(mtyp)
	opt := &imports.Options{Comments: true, TabIndent: true, TabWidth: 8}
	code, err = imports.Process("", code, opt)
	if err != nil {
		// BUG fix: the original called fmt.Errorf with extra arguments
		// but no matching format verbs, which go vet flags and which
		// produced a mangled "%!(EXTRA ...)" message. Format the error
		// and the unformatted source explicitly; the raw code is still
		// returned so callers can inspect it.
		return code, fmt.Errorf("can't gofmt generated code: %v\n%s", err, code)
	}
	return code, nil
}
func loadPackage(cfg *Config) (*types.Package, error) {
// Load the package.
pkgs, err := parser.ParseDir(fs, dir, nil, parser.AllErrors)
pkgs, err := parser.ParseDir(cfg.FileSet, cfg.Dir, nil, parser.AllErrors)
if err != nil {
fatal(err)
return nil, err
}
if len(pkgs) == 0 || len(pkgs) > 1 {
fatal(err)
return nil, fmt.Errorf("input directory must contain exactly one package")
}
var files []*ast.File
var name string
@ -109,89 +209,72 @@ func loadPackage(fs *token.FileSet, dir string) *types.Package {
break
}
// Type-check the package.
cfg := types.Config{
IgnoreFuncBodies: true,
FakeImportC: true,
Importer: importer.Default(),
}
tpkg, err := cfg.Check(name, fs, files, nil)
tcfg := types.Config{IgnoreFuncBodies: true, FakeImportC: true, Importer: cfg.Importer}
tpkg, err := tcfg.Check(name, cfg.FileSet, files, nil)
if err != nil {
fatal(err)
}
return tpkg
}
func makeMarshalingCode(fs *token.FileSet, pkg *types.Package, typename, otypename string) (packageBody []byte) {
typ, err := lookupStructType(pkg.Scope(), typename)
if err != nil {
fatal(fmt.Sprintf("can't find %s: %v", typename, err))
}
mtyp := newMarshalerType(fs, pkg, typ)
if otypename != "" {
otyp, err := lookupStructType(pkg.Scope(), otypename)
if err != nil {
fatal(fmt.Sprintf("can't find field replacement type %s: %v", otypename, err))
return nil, err
}
mtyp.loadOverrides(otypename, otyp.Underlying().(*types.Struct))
return tpkg, nil
}
func genPackage(mtyp *marshalerType) []byte {
w := new(bytes.Buffer)
fmt.Fprintln(w, "// generated by gencodec, do not edit.\n")
fmt.Fprintln(w, "package ", pkg.Name())
fmt.Fprintln(w, render(mtyp.computeImports(), `
import (
{{- range $name, $path := . }}
{{ $name }} "{{ $path }}"
{{- end }}
)`))
fmt.Fprintln(w, "package", mtyp.orig.Obj().Pkg().Name())
fmt.Fprintln(w)
fmt.Fprintln(w, mtyp.JSONMarshalMethod())
mtyp.scope.writeImportDecl(w)
fmt.Fprintln(w)
fmt.Fprintln(w, mtyp.JSONUnmarshalMethod())
writeFunction(w, mtyp.fs, genMarshalJSON(mtyp))
fmt.Fprintln(w)
fmt.Fprintln(w, mtyp.YAMLMarshalMethod())
writeFunction(w, mtyp.fs, genUnmarshalJSON(mtyp))
fmt.Fprintln(w)
fmt.Fprintln(w, mtyp.YAMLUnmarshalMethod())
// Use goimports to format the source because it separates imports.
opt := &imports.Options{Comments: true, FormatOnly: true, TabIndent: true, TabWidth: 8}
body, err := imports.Process("", w.Bytes(), opt)
if err != nil {
fatal("can't gofmt generated code:", err, "\n"+w.String())
}
return body
writeFunction(w, mtyp.fs, genMarshalYAML(mtyp))
fmt.Fprintln(w)
writeFunction(w, mtyp.fs, genUnmarshalYAML(mtyp))
return w.Bytes()
}
// marshalerType represents the intermediate struct type used during marshaling.
// This is the input data to all the Go code templates.
type marshalerType struct {
OrigName string
Name string
name string
Fields []*marshalerField
fs *token.FileSet
orig *types.Named
scope *fileScope
}
// marshalerField represents a field of the intermediate marshaling type.
type marshalerField struct {
parent *marshalerType
field *types.Var
name string
typ types.Type
origTyp types.Type
tag string
}
func newMarshalerType(fs *token.FileSet, pkg *types.Package, typ *types.Named) *marshalerType {
name := typ.Obj().Name() + "JSON"
func newMarshalerType(fs *token.FileSet, imp types.Importer, typ *types.Named) *marshalerType {
mtyp := &marshalerType{name: typ.Obj().Name(), fs: fs, orig: typ}
styp := typ.Underlying().(*types.Struct)
mtyp := &marshalerType{OrigName: typ.Obj().Name(), Name: name, fs: fs, orig: typ}
mtyp.scope = newFileScope(imp, typ.Obj().Pkg())
mtyp.scope.addReferences(styp)
// Add packages which are always needed.
mtyp.scope.addImport("encoding/json")
mtyp.scope.addImport("errors")
for i := 0; i < styp.NumFields(); i++ {
f := styp.Field(i)
if !f.Exported() {
continue
}
mf := &marshalerField{parent: mtyp, field: f, typ: ensurePointer(f.Type()), tag: styp.Tag(i)}
mf := &marshalerField{
name: f.Name(),
typ: ensurePointer(f.Type()),
origTyp: f.Type(),
tag: styp.Tag(i),
}
if f.Anonymous() {
fmt.Fprintln(os.Stderr, mf.errorf("Warning: ignoring embedded field"))
fmt.Fprintf(os.Stderr, "Warning: ignoring embedded field %s\n", f.Name())
continue
}
mtyp.Fields = append(mtyp.Fields, mf)
@ -201,225 +284,34 @@ func newMarshalerType(fs *token.FileSet, pkg *types.Package, typ *types.Named) *
// loadOverrides sets field types of the intermediate marshaling type from
// matching fields of otyp.
func (mtyp *marshalerType) loadOverrides(otypename string, otyp *types.Struct) {
func (mtyp *marshalerType) loadOverrides(otypename string, otyp *types.Struct) error {
for i := 0; i < otyp.NumFields(); i++ {
of := otyp.Field(i)
if of.Anonymous() || !of.Exported() {
fatalf("%v: field override type cannot have embedded or unexported fields", mtyp.fs.Position(of.Pos()))
return fmt.Errorf("%v: field override type cannot have embedded or unexported fields", mtyp.fs.Position(of.Pos()))
}
f := mtyp.fieldByName(of.Name())
if f == nil {
fatalf("%v: no matching field for %s in original type %s", mtyp.fs.Position(of.Pos()), of.Name(), mtyp.OrigName)
return fmt.Errorf("%v: no matching field for %s in original type %s", mtyp.fs.Position(of.Pos()), of.Name(), mtyp.name)
}
if !types.ConvertibleTo(of.Type(), f.field.Type()) {
fatalf("%v: field override type %s is not convertible to %s", mtyp.fs.Position(of.Pos()), mtyp.typeString(of.Type()), mtyp.typeString(f.field.Type()))
if err := checkConvertible(of.Type(), f.origTyp); err != nil {
return fmt.Errorf("%v: invalid field override: %v", mtyp.fs.Position(of.Pos()), err)
}
f.typ = ensurePointer(of.Type())
}
mtyp.scope.addReferences(otyp)
return nil
}
func (mtyp *marshalerType) fieldByName(name string) *marshalerField {
for _, f := range mtyp.Fields {
if f.field.Name() == name {
if f.name == name {
return f
}
}
return nil
}
// computeImports returns the import paths of all referenced types.
// computeImports must be called before generating any code because it
// renames packages to avoid name clashes.
func (mtyp *marshalerType) computeImports() map[string]string {
seen := make(map[string]string)
counter := 0
add := func(name string, path string, pkg *types.Package) {
if seen[name] != path {
if pkg != nil {
name = "_" + name
pkg.SetName(name)
}
if seen[name] != "" {
// Name clash, add counter.
name += "_" + strconv.Itoa(counter)
counter++
pkg.SetName(name)
}
seen[name] = path
}
}
addNamed := func(typ *types.Named) {
if pkg := typ.Obj().Pkg(); pkg != mtyp.orig.Obj().Pkg() {
add(pkg.Name(), pkg.Path(), pkg)
}
}
// Add packages which always referenced by the generated code.
add("json", "encoding/json", nil)
add("errors", "errors", nil)
for _, f := range mtyp.Fields {
// Add field types of the intermediate struct.
walkNamedTypes(f.typ, addNamed)
// Add field types of the original struct. Note that this won't generate unused
// imports because all fields are either referenced by a conversion or by fields
// of the intermediate struct (if no conversion is needed).
walkNamedTypes(f.field.Type(), addNamed)
}
return seen
}
// JSONMarshalMethod generates MarshalJSON.
func (mtyp *marshalerType) JSONMarshalMethod() string {
return render(mtyp, `
// MarshalJSON implements json.Marshaler.
func (x *{{.OrigName}}) MarshalJSON() ([]byte, error) {
{{.TypeDecl}}
return json.Marshal(&{{.Name}}{
{{- range .Fields}}
{{.Name}}: {{.Convert "x"}},
{{- end}}
})
}`)
}
// YAMLMarsalMethod generates MarshalYAML.
func (mtyp *marshalerType) YAMLMarshalMethod() string {
return render(mtyp, `
// MarshalYAML implements yaml.Marshaler
func (x *{{.OrigName}}) MarshalYAML() (interface{}, error) {
{{.TypeDecl}}
return &{{.Name}}{
{{- range .Fields}}
{{.Name}}: {{.Convert "x"}},
{{- end}}
}, nil
}`)
}
// JSONUnmarshalMethod generates UnmarshalJSON.
func (mtyp *marshalerType) JSONUnmarshalMethod() string {
return render(mtyp, `
// UnmarshalJSON implements json.Unmarshaler.
func (x *{{.OrigName}}) UnmarshalJSON(input []byte) error {
{{.TypeDecl}}
var dec {{.Name}}
if err := json.Unmarshal(input, &dec); err != nil {
return err
}
var v {{.OrigName}}
{{.UnmarshalConversions "json"}}
*x = v
return nil
}`)
}
// YAMLUnmarshalMethod generates UnmarshalYAML.
func (mtyp *marshalerType) YAMLUnmarshalMethod() string {
return render(mtyp, `
// UnmarshalYAML implements yaml.Unmarshaler.
func (x *{{.OrigName}}) UnmarshalYAML(fn func (interface{}) error) error {
{{.TypeDecl}}
var dec {{.Name}}
if err := fn(&dec); err != nil {
return err
}
var v {{.OrigName}}
{{.UnmarshalConversions "yaml"}}
*x = v
return nil
}`)
}
// TypeDecl genereates the declaration of the intermediate marshaling type.
func (mtyp *marshalerType) TypeDecl() string {
return render(mtyp, `
type {{.Name}} struct{
{{- range .Fields}}
{{.Name}} {{.Type}} {{.StructTag}}
{{- end}}
}`)
}
// UnmarshalConversion genereates field conversions and presence checks.
func (mtyp *marshalerType) UnmarshalConversions(formatTag string) (s string) {
type fieldContext struct{ Typ, Name, EncName, Conv string }
for _, mf := range mtyp.Fields {
ctx := fieldContext{
Typ: strings.ToUpper(formatTag) + " " + mtyp.OrigName,
Name: mf.Name(),
EncName: mf.encodedName(formatTag),
Conv: mf.ConvertBack("dec"),
}
if mf.isOptional(formatTag) {
s += render(ctx, `
if dec.{{.Name}} != nil {
v.{{.Name}} = {{.Conv}}
}`)
} else {
s += render(ctx, `
if dec.{{.Name}} == nil {
return errors.New("missing required field '{{.EncName}}' in {{.Typ}}")
}
v.{{.Name}} = {{.Conv}}`)
}
s += "\n"
}
return s
}
func (mf *marshalerField) Name() string {
return mf.field.Name()
}
func (mf *marshalerField) Type() string {
return mf.parent.typeString(mf.typ)
}
func (mf *marshalerField) OrigType() string {
return mf.parent.typeString(mf.typ)
}
func (mf *marshalerField) StructTag() string {
if mf.tag == "" {
return ""
}
return "`" + mf.tag + "`"
}
func (mf *marshalerField) Convert(variable string) string {
expr := fmt.Sprintf("%s.%s", variable, mf.field.Name())
return mf.parent.conversionExpr(expr, mf.field.Type(), mf.typ)
}
func (mf *marshalerField) ConvertBack(variable string) string {
expr := fmt.Sprintf("%s.%s", variable, mf.field.Name())
return mf.parent.conversionExpr(expr, mf.typ, mf.field.Type())
}
// conversionExpr wraps valueExpr so it converts a value of type from into a
// value of type to. Pointer mismatches are bridged by dereferencing or
// taking the address; an explicit conversion is added only when the
// (adjusted) source type is not directly assignable to the target.
func (mtyp *marshalerType) conversionExpr(valueExpr string, from, to types.Type) string {
	switch {
	case isPointer(from) && !isPointer(to):
		valueExpr = "*" + valueExpr
		from = from.(*types.Pointer).Elem()
	case !isPointer(from) && isPointer(to):
		valueExpr = "&" + valueExpr
		from = types.NewPointer(from)
	}
	if !types.AssignableTo(from, to) {
		return fmt.Sprintf("(%s)(%s)", mtyp.typeString(to), valueExpr)
	}
	return valueExpr
}
// errorf creates an error whose message is prefixed with the field's source
// position and its qualified name (Type.Field).
func (mf *marshalerField) errorf(format string, args ...interface{}) error {
	position := mf.parent.fs.Position(mf.field.Pos()).String()
	msg := fmt.Sprintf(format, args...)
	return errors.New(position + ": (" + mf.parent.OrigName + "." + mf.Name() + ") " + msg)
}
// isOptional returns whether the field is optional when decoding the given format.
func (mf *marshalerField) isOptional(format string) bool {
rtag := reflect.StructTag(mf.tag)
@ -437,99 +329,11 @@ func (mf *marshalerField) encodedName(format string) string {
val = val[:comma]
}
if val == "" || val == "-" {
return uncapitalize(mf.Name())
return uncapitalize(mf.name)
}
return val
}
// typeString renders typ as Go source, omitting the package qualifier for
// types declared in the package being processed.
func (mtyp *marshalerType) typeString(typ types.Type) string {
	qualifier := func(pkg *types.Package) string {
		if pkg == mtyp.orig.Obj().Pkg() {
			return ""
		}
		return pkg.Name()
	}
	return types.TypeString(typ, qualifier)
}
// walkNamedTypes runs the callback for all named types contained in the given type.
func walkNamedTypes(typ types.Type, callback func(*types.Named)) {
switch typ := typ.(type) {
case *types.Basic:
case *types.Chan:
walkNamedTypes(typ.Elem(), callback)
case *types.Map:
walkNamedTypes(typ.Key(), callback)
walkNamedTypes(typ.Elem(), callback)
case *types.Named:
callback(typ)
case *types.Pointer:
walkNamedTypes(typ.Elem(), callback)
case *types.Slice:
walkNamedTypes(typ.Elem(), callback)
case *types.Struct:
for i := 0; i < typ.NumFields(); i++ {
walkNamedTypes(typ.Field(i).Type(), callback)
}
default:
panic(fmt.Errorf("can't walk %T", typ))
}
}
func lookupStructType(scope *types.Scope, name string) (*types.Named, error) {
typ, err := lookupType(scope, name)
if err != nil {
return nil, err
}
_, ok := typ.Underlying().(*types.Struct)
if !ok {
return nil, errors.New("not a struct type")
}
return typ, nil
}
func lookupType(scope *types.Scope, name string) (*types.Named, error) {
obj := scope.Lookup(name)
if obj == nil {
return nil, errors.New("no such identifier")
}
typ, ok := obj.(*types.TypeName)
if !ok {
return nil, errors.New("not a type")
}
return typ.Type().(*types.Named), nil
}
func isPointer(typ types.Type) bool {
_, ok := typ.(*types.Pointer)
return ok
}
func ensurePointer(typ types.Type) types.Type {
if isPointer(typ) {
return typ
}
return types.NewPointer(typ)
}
// uncapitalize lower-cases the first byte of s. The empty string is
// returned unchanged; the original indexed s[:1] unconditionally and
// panicked on empty input.
func uncapitalize(s string) string {
	if s == "" {
		return s
	}
	return strings.ToLower(s[:1]) + s[1:]
}
// render executes the given text/template source (leading/trailing
// whitespace trimmed) against data and returns the result. Template parse
// errors and execution errors are programming errors and panic.
func render(data interface{}, text string) string {
	tpl := template.Must(template.New("").Parse(strings.TrimSpace(text)))
	var buf bytes.Buffer
	if err := tpl.Execute(&buf, data); err != nil {
		panic(err)
	}
	return buf.String()
}
// fatal prints its arguments to stderr and terminates the process with
// exit status 1.
func fatal(args ...interface{}) {
	fmt.Fprintln(os.Stderr, args...)
	os.Exit(1)
}
// fatalf prints a formatted message (with trailing newline) to stderr and
// terminates the process with exit status 1.
func fatalf(format string, args ...interface{}) {
	fmt.Fprintf(os.Stderr, format+"\n", args...)
	os.Exit(1)
}

@ -0,0 +1,46 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
package main
import (
"io/ioutil"
"path/filepath"
"testing"
"github.com/kylelemons/godebug/diff"
)
// 'golden' tests. These tests compare the generated output code with the
// expected code in internal/tests/*/output.go. The expected output can be
// regenerated using
//
// go generate ./internal/...
// TestMapConv checks generated map-type conversions against the golden file.
func TestMapConv(t *testing.T) {
	runGoldenTest(t, Config{Dir: "mapconv", Type: "X", FieldOverride: "Xo"})
}
// TestSliceConv checks generated slice-type conversions against the golden file.
func TestSliceConv(t *testing.T) {
	runGoldenTest(t, Config{Dir: "sliceconv", Type: "X", FieldOverride: "Xo"})
}
// TestNameClash checks identifier-renaming in the generated code against the golden file.
func TestNameClash(t *testing.T) {
	runGoldenTest(t, Config{Dir: "nameclash", Type: "Y", FieldOverride: "Yo"})
}
// runGoldenTest generates code for cfg (rooted under internal/tests) and
// compares it against the committed output.go golden file, reporting a
// line diff on mismatch.
func runGoldenTest(t *testing.T, cfg Config) {
	cfg.Dir = filepath.Join("internal", "tests", cfg.Dir)
	golden := filepath.Join(cfg.Dir, "output.go")
	want, err := ioutil.ReadFile(golden)
	if err != nil {
		t.Fatal(err)
	}
	have, err := cfg.process()
	if err != nil {
		t.Fatal(err)
	}
	if d := diff.Diff(string(want), string(have)); d != "" {
		t.Errorf("output mismatch\n\n%s", d)
	}
}

@ -0,0 +1,244 @@
// Copyright 2017 Felix Lange <fjl@twurst.com>.
// Use of this source code is governed by the MIT license,
// which can be found in the LICENSE file.
package main
import (
"errors"
"fmt"
"go/types"
"io"
"sort"
"strconv"
)
// walkNamedTypes runs the callback for all named types contained in the given type.
func walkNamedTypes(typ types.Type, callback func(*types.Named)) {
switch typ := typ.(type) {
case *types.Basic:
case *types.Chan:
walkNamedTypes(typ.Elem(), callback)
case *types.Map:
walkNamedTypes(typ.Key(), callback)
walkNamedTypes(typ.Elem(), callback)
case *types.Named:
callback(typ)
case *types.Pointer:
walkNamedTypes(typ.Elem(), callback)
case *types.Slice:
walkNamedTypes(typ.Elem(), callback)
case *types.Struct:
for i := 0; i < typ.NumFields(); i++ {
walkNamedTypes(typ.Field(i).Type(), callback)
}
default:
panic(fmt.Errorf("can't walk %T", typ))
}
}
func lookupStructType(scope *types.Scope, name string) (*types.Named, error) {
typ, err := lookupType(scope, name)
if err != nil {
return nil, err
}
_, ok := typ.Underlying().(*types.Struct)
if !ok {
return nil, errors.New("not a struct type")
}
return typ, nil
}
func lookupType(scope *types.Scope, name string) (*types.Named, error) {
obj := scope.Lookup(name)
if obj == nil {
return nil, errors.New("no such identifier")
}
typ, ok := obj.(*types.TypeName)
if !ok {
return nil, errors.New("not a type")
}
return typ.Type().(*types.Named), nil
}
func isPointer(typ types.Type) bool {
_, ok := typ.(*types.Pointer)
return ok
}
func isSlice(typ types.Type) bool {
_, ok := typ.(*types.Slice)
return ok
}
func isMap(typ types.Type) bool {
_, ok := typ.(*types.Map)
return ok
}
func ensurePointer(typ types.Type) types.Type {
if isPointer(typ) || isSlice(typ) || isMap(typ) {
return typ
}
return types.NewPointer(typ)
}
// checkConvertible determines whether values of type from can be converted to type to. It
// returns nil if convertible and a descriptive error otherwise.
func checkConvertible(from, to types.Type) error {
if types.ConvertibleTo(from, to) {
return nil
}
// Slices.
sfrom, fromIsSlice := from.(*types.Slice)
sto, toIsSlice := to.(*types.Slice)
if fromIsSlice && toIsSlice {
if !types.ConvertibleTo(sfrom.Elem(), sto.Elem()) {
return fmt.Errorf("slice element type %s is not convertible to %s", sfrom.Elem(), sto.Elem())
}
return nil
}
// Maps.
mfrom, fromIsMap := from.(*types.Map)
mto, toIsMap := to.(*types.Map)
if fromIsMap && toIsMap {
if !types.ConvertibleTo(mfrom.Key(), mto.Key()) {
return fmt.Errorf("map key type %s is not convertible to %s", mfrom.Key(), mto.Key())
}
if !types.ConvertibleTo(mfrom.Elem(), mto.Elem()) {
return fmt.Errorf("map element type %s is not convertible to %s", mfrom.Elem(), mto.Elem())
}
return nil
}
return fmt.Errorf("type %s is not convertible to %s", from, to)
}
// fileScope tracks imports and other names at file scope.
type fileScope struct {
	imports       []*types.Package          // known imports, kept sorted by path (see insertImport)
	importsByName map[string]*types.Package // local import name -> package
	importNames   map[string]string         // package path -> local import name
	otherNames    map[string]bool           // non-package identifiers
	pkg           *types.Package            // the package being generated into
	imp           types.Importer            // used by addImport to load packages
}
// newFileScope creates an empty file scope for the given target package.
func newFileScope(imp types.Importer, pkg *types.Package) *fileScope {
	return &fileScope{
		pkg:        pkg,
		imp:        imp,
		otherNames: make(map[string]bool),
	}
}
// writeImportDecl writes a grouped import declaration for all tracked
// imports to w, emitting an explicit local name for renamed packages.
func (s *fileScope) writeImportDecl(w io.Writer) {
	fmt.Fprintln(w, "import (")
	for _, pkg := range s.imports {
		name := s.importNames[pkg.Path()]
		if name == pkg.Name() {
			fmt.Fprintf(w, "\t%q\n", pkg.Path())
		} else {
			fmt.Fprintf(w, "\t%s %q\n", name, pkg.Path())
		}
	}
	fmt.Fprintln(w, ")")
}
// addImport loads a package and adds it to the import set.
// Failure to import is a programming/configuration error and panics.
func (s *fileScope) addImport(path string) {
	pkg, err := s.imp.Import(path)
	if err != nil {
		panic(fmt.Errorf("can't import %q: %v", path, err))
	}
	s.insertImport(pkg)
	s.rebuildImports()
}
// addReferences marks all names referenced by typ as used. Named types from
// the target package are recorded as file-level identifiers; types from
// other packages are added to the import set.
func (s *fileScope) addReferences(typ types.Type) {
	walkNamedTypes(typ, func(nt *types.Named) {
		if nt.Obj().Pkg() == s.pkg {
			s.otherNames[nt.Obj().Name()] = true
		} else {
			s.insertImport(nt.Obj().Pkg())
		}
	})
	s.rebuildImports()
}
// insertImport adds pkg to the sorted list of known imports, skipping
// packages that are already present. This method should not be used
// directly because it doesn't rebuild the import name cache.
func (s *fileScope) insertImport(pkg *types.Package) {
	// Find the insertion point with >= so the search lands ON an existing
	// entry with the same path. The original used >, which always returned
	// an index past any equal-path element, so the duplicate check below
	// could never fire and the same package was inserted repeatedly.
	i := sort.Search(len(s.imports), func(i int) bool {
		return s.imports[i].Path() >= pkg.Path()
	})
	if i < len(s.imports) && s.imports[i] == pkg {
		return // already imported
	}
	s.imports = append(s.imports[:i], append([]*types.Package{pkg}, s.imports[i:]...)...)
}
// rebuildImports recomputes the name caches for all imported packages,
// assigning clash-free local names as it goes.
func (s *fileScope) rebuildImports() {
	s.importNames = map[string]string{}
	s.importsByName = map[string]*types.Package{}
	for _, pkg := range s.imports {
		s.maybeRenameImport(pkg)
	}
}
// maybeRenameImport picks a non-clashing local name for pkg and records it
// in the name caches. The candidates tried are Name, Name0, Name1, ...
func (s *fileScope) maybeRenameImport(pkg *types.Package) {
	name := pkg.Name()
	for i := 0; s.isNameTaken(name); i++ {
		name = pkg.Name() + strconv.Itoa(i)
	}
	s.importNames[pkg.Path()] = name
	s.importsByName[name] = pkg
}
// isNameTaken reports whether the given name is used by an import, another
// file-level identifier, or a universe-scope identifier.
func (s *fileScope) isNameTaken(name string) bool {
	if s.importsByName[name] != nil {
		return true
	}
	if s.otherNames[name] {
		return true
	}
	return types.Universe.Lookup(name) != nil
}
// qualify is a types.Qualifier that prepends the (possibly renamed) package
// name of imported types to a type name. Types of the target package are
// left unqualified.
func (s *fileScope) qualify(pkg *types.Package) string {
	if pkg != s.pkg {
		return s.packageName(pkg.Path())
	}
	return ""
}
// packageName returns the local import name assigned to path. The package
// must already be in the import set; anything else is a bug and panics.
func (s *fileScope) packageName(path string) string {
	if name, ok := s.importNames[path]; ok {
		return name
	}
	panic("BUG: missing package " + path)
}
// funcScope tracks used identifiers in a function. It can create new identifiers that do
// not clash with the parent scope.
type funcScope struct {
	used   map[string]bool // identifiers handed out by newIdent
	parent *fileScope      // file scope consulted for clashes
}
// newFuncScope creates an empty function scope nested in parent.
func newFuncScope(parent *fileScope) *funcScope {
	return &funcScope{used: make(map[string]bool), parent: parent}
}
// newIdent creates a new identifier that doesn't clash with any name in the
// scope or its parent file scope. Candidates are base, base0, base1, ...;
// the chosen name is recorded so later calls won't reuse it.
func (s *funcScope) newIdent(base string) string {
	name := base
	for i := 0; ; i++ {
		if !s.parent.isNameTaken(name) && !s.used[name] {
			s.used[name] = true
			return name
		}
		name = base + strconv.Itoa(i)
	}
}

@ -0,0 +1,78 @@
# gogen
A simplification of Go's `go/ast` package that allows for some
interesting code generation. Currently very rough.
# Examples
## Hello World
```go
package main
import (
"os"
. "github.com/garslo/gogen"
)
func main() {
pkg := Package{Name: "main"}
pkg.Declare(Import{"fmt"})
pkg.Declare(Function{
Name: "main",
Body: []Statement{
CallFunction{
Func: Dotted{Var{"fmt"}, "Println"},
Params: []Expression{Var{`"Hello World!"`}},
},
},
})
pkg.WriteTo(os.Stdout)
}
```
Output:
```go
package main
import "fmt"
func main() {
fmt.Println("Hello World!")
}
```
## More
See the
[examples](https://github.com/garslo/gogen/tree/master/examples)
directory for more examples and a build/run script.
```sh
$ ./run-example.sh for_loop.go
CODE:
package main
import "os"
import "fmt"
func main() {
var i int
for i = 0; i <= 10; i++ {
fmt.Println(i)
}
os.Exit(i)
}
RUN RESULT:
0
1
2
3
4
5
6
7
8
9
10
exit status 11
```

@ -0,0 +1,19 @@
package gogen
import (
"go/ast"
"go/token"
)
// Assign is a plain assignment statement: Lhs = Rhs.
type Assign struct {
	Lhs Expression
	Rhs Expression
}

// Statement renders the assignment as an *ast.AssignStmt using token.ASSIGN.
func (me Assign) Statement() ast.Stmt {
	return &ast.AssignStmt{
		Tok: token.ASSIGN,
		Lhs: []ast.Expr{me.Lhs.Expression()},
		Rhs: []ast.Expr{me.Rhs.Expression()},
	}
}

@ -0,0 +1,84 @@
package gogen
import (
"go/ast"
"go/token"
)
// LessThan is the binary comparison Lhs < Rhs.
type LessThan struct {
	Lhs Expression
	Rhs Expression
}

// Expression renders the comparison as an *ast.BinaryExpr (token.LSS).
func (me LessThan) Expression() ast.Expr {
	return &ast.BinaryExpr{
		Op: token.LSS,
		X:  me.Lhs.Expression(),
		Y:  me.Rhs.Expression(),
	}
}

// LessThanOrEqual is the binary comparison Lhs <= Rhs.
type LessThanOrEqual struct {
	Lhs Expression
	Rhs Expression
}

// Expression renders the comparison as an *ast.BinaryExpr (token.LEQ).
func (me LessThanOrEqual) Expression() ast.Expr {
	return &ast.BinaryExpr{
		Op: token.LEQ,
		X:  me.Lhs.Expression(),
		Y:  me.Rhs.Expression(),
	}
}

// GreaterThan is the binary comparison Lhs > Rhs.
type GreaterThan struct {
	Lhs Expression
	Rhs Expression
}

// Expression renders the comparison as an *ast.BinaryExpr (token.GTR).
func (me GreaterThan) Expression() ast.Expr {
	return &ast.BinaryExpr{
		Op: token.GTR,
		X:  me.Lhs.Expression(),
		Y:  me.Rhs.Expression(),
	}
}

// GreaterThanOrEqual is the binary comparison Lhs >= Rhs.
type GreaterThanOrEqual struct {
	Lhs Expression
	Rhs Expression
}

// Expression renders the comparison as an *ast.BinaryExpr (token.GEQ).
func (me GreaterThanOrEqual) Expression() ast.Expr {
	return &ast.BinaryExpr{
		Op: token.GEQ,
		X:  me.Lhs.Expression(),
		Y:  me.Rhs.Expression(),
	}
}

// Equals is the binary comparison Lhs == Rhs.
type Equals struct {
	Lhs Expression
	Rhs Expression
}

// Expression renders the comparison as an *ast.BinaryExpr (token.EQL).
func (me Equals) Expression() ast.Expr {
	return &ast.BinaryExpr{
		Op: token.EQL,
		X:  me.Lhs.Expression(),
		Y:  me.Rhs.Expression(),
	}
}

// NotEqual is the binary comparison Lhs != Rhs.
type NotEqual struct {
	Lhs Expression
	Rhs Expression
}

// Expression renders the comparison as an *ast.BinaryExpr (token.NEQ).
func (me NotEqual) Expression() ast.Expr {
	return &ast.BinaryExpr{
		Op: token.NEQ,
		X:  me.Lhs.Expression(),
		Y:  me.Rhs.Expression(),
	}
}

@ -0,0 +1,25 @@
package gogen
// BuiltinTypes lists the names treated as builtin Go types.
//
// NOTE(review): "float" is not a predeclared Go type (only float32 and
// float64 exist); the entry is kept as-is to preserve existing behavior of
// any name checks built on this list.
var BuiltinTypes = [...]string{
	"int",
	"int8",
	"int16",
	"int32",
	"int64",
	"uint",
	"uint8",
	"uint16",
	"uint32",
	"uint64",
	"uintptr",
	"float",
	"float32",
	"float64",
	"string",
	"bool",
	"byte",
	"complex128",
	"complex64",
	"error",
	"rune",
}

@ -0,0 +1,7 @@
package gogen
import "go/ast"
// Declaration is implemented by all values that can render themselves as a
// top-level Go declaration.
type Declaration interface {
	Declaration() ast.Decl
}

@ -0,0 +1,35 @@
package gogen
import (
"go/ast"
"go/token"
)
// Declare is a variable declaration statement: var Name TypeName.
type Declare struct {
	Name     string
	TypeName string
}

// Statement renders the declaration as an *ast.DeclStmt wrapping a VAR GenDecl.
func (me Declare) Statement() ast.Stmt {
	return &ast.DeclStmt{
		Decl: &ast.GenDecl{
			Tok: token.VAR,
			Specs: []ast.Spec{
				&ast.ValueSpec{
					Names: []*ast.Ident{
						&ast.Ident{
							Name: me.Name,
							Obj: &ast.Object{
								Kind: ast.Var,
								Name: me.Name,
							},
						},
					},
					Type: &ast.Ident{
						Name: me.TypeName,
					},
				},
			},
		},
	}
}

@ -0,0 +1,19 @@
package gogen
import (
"go/ast"
"go/token"
)
// DeclareAndAssign is a short variable declaration: Lhs := Rhs.
type DeclareAndAssign struct {
	Lhs Expression
	Rhs Expression
}

// Statement renders the declaration as an *ast.AssignStmt using token.DEFINE.
func (me DeclareAndAssign) Statement() ast.Stmt {
	return &ast.AssignStmt{
		Tok: token.DEFINE,
		Lhs: []ast.Expr{me.Lhs.Expression()},
		Rhs: []ast.Expr{me.Rhs.Expression()},
	}
}

@ -0,0 +1,17 @@
package gogen
import "go/ast"
// Dotted is a selector expression: Receiver.Name.
type Dotted struct {
	Receiver Expression
	Name     string
}

// Expression renders the selector as an *ast.SelectorExpr.
func (me Dotted) Expression() ast.Expr {
	return &ast.SelectorExpr{
		X: me.Receiver.Expression(),
		Sel: &ast.Ident{
			Name: me.Name,
		},
	}
}

@ -0,0 +1,7 @@
package gogen
import "go/ast"
// Expression is implemented by all values that can render themselves as a
// Go expression.
type Expression interface {
	Expression() ast.Expr
}

@ -0,0 +1,40 @@
package gogen
import "go/ast"
// For is a three-clause for statement. Init, Condition and Post may each be
// nil, in which case the corresponding clause is omitted from the output.
type For struct {
	Init      Statement
	Condition Expression
	Post      Statement
	Body      []Statement
}

// Statement renders the loop as an *ast.ForStmt.
func (me For) Statement() ast.Stmt {
	body := make([]ast.Stmt, len(me.Body))
	for i, bodyPart := range me.Body {
		body[i] = bodyPart.Statement()
	}
	var (
		init ast.Stmt
		cond ast.Expr
		post ast.Stmt
	)
	if me.Init != nil {
		init = me.Init.Statement()
	}
	if me.Condition != nil {
		cond = me.Condition.Expression()
	}
	if me.Post != nil {
		post = me.Post.Statement()
	}
	return &ast.ForStmt{
		Init: init,
		Cond: cond,
		Post: post,
		Body: &ast.BlockStmt{
			List: body,
		},
	}
}

@ -0,0 +1,127 @@
package gogen
import "go/ast"
// Receiver describes a method receiver. A zero Type means "plain function"
// and makes Ast return nil.
type Receiver struct {
	Name string
	Type Expression
}

// Ast renders the receiver as a single-field *ast.FieldList, or nil when no
// receiver type is set.
func (me Receiver) Ast() *ast.FieldList {
	if me.Type == nil {
		return nil
	}
	return &ast.FieldList{
		List: []*ast.Field{
			&ast.Field{
				Names: []*ast.Ident{
					&ast.Ident{
						Name: me.Name,
						Obj: &ast.Object{
							Kind: ast.Var,
							Name: me.Name,
						},
					},
				},
				Type: me.Type.Expression(),
			},
		},
	}
}
// Function describes a function or method declaration (with optional
// receiver, parameters, return types and body statements).
type Function struct {
	Receiver    Receiver
	Name        string
	ReturnTypes Types
	Parameters  Types
	Body        []Statement
}

// Declare adds the function to pkg and returns it for chaining.
func (me Function) Declare(pkg *Package) Function {
	pkg.Declare(me)
	return me
}

// Declaration renders the function as an *ast.FuncDecl. Unnamed parameters
// and results are emitted without identifier lists.
func (me Function) Declaration() ast.Decl {
	paramFields := make([]*ast.Field, len(me.Parameters))
	for j, param := range me.Parameters {
		var names []*ast.Ident
		if param.Name != "" {
			names = []*ast.Ident{
				&ast.Ident{
					Name: param.Name,
					Obj: &ast.Object{
						Kind: ast.Var,
						Name: param.Name,
					},
				},
			}
		}
		paramFields[j] = &ast.Field{
			Names: names,
			Type: &ast.Ident{
				Name: param.TypeName,
			},
		}
	}
	returnFields := make([]*ast.Field, len(me.ReturnTypes))
	for j, ret := range me.ReturnTypes {
		var names []*ast.Ident
		if ret.Name != "" {
			names = []*ast.Ident{
				&ast.Ident{
					Name: ret.Name,
					Obj: &ast.Object{
						Kind: ast.Var,
						Name: ret.Name,
					},
				},
			}
		}
		returnFields[j] = &ast.Field{
			Names: names,
			Type: &ast.Ident{
				Name: ret.TypeName,
			},
		}
	}
	stmts := make([]ast.Stmt, len(me.Body))
	for j, stmt := range me.Body {
		stmts[j] = stmt.Statement()
	}
	return &ast.FuncDecl{
		Recv: me.Receiver.Ast(),
		Name: &ast.Ident{
			Name: me.Name,
			Obj: &ast.Object{
				Kind: ast.Fun,
				Name: me.Name,
			},
		},
		Type: &ast.FuncType{
			Params: &ast.FieldList{
				List: paramFields,
			},
			Results: &ast.FieldList{
				List: returnFields,
			},
		},
		Body: &ast.BlockStmt{
			List: stmts,
		},
	}
}

// Call builds a call expression invoking this function by name.
func (me Function) Call(params ...Expression) CallFunction {
	// TODO: what if this is a method?
	return CallFunction{
		Func:   Var{me.Name},
		Params: params,
	}
}
// Functions is a growable list of Function declarations.
type Functions []Function

// Add appends fn to the list.
func (me *Functions) Add(fn Function) {
	*me = append(*me, fn)
}

@ -0,0 +1,37 @@
package gogen
import "go/ast"
// CallFunction is a function call: Func(Params...). It can be used both as
// a statement and as an expression.
type CallFunction struct {
	Func   Expression
	Params []Expression
}

// Statement renders the call as an expression statement.
func (me CallFunction) Statement() ast.Stmt {
	return &ast.ExprStmt{
		X: me.Expression(),
	}
}

// Expression renders the call as an *ast.CallExpr.
func (me CallFunction) Expression() ast.Expr {
	params := make([]ast.Expr, len(me.Params))
	for i, param := range me.Params {
		params[i] = param.Expression()
	}
	return &ast.CallExpr{
		Fun:  me.Func.Expression(),
		Args: params,
	}
}
// Functor wraps an arbitrary expression so it can be invoked with Call.
// TODO: Bad name, change it
type Functor struct {
	Func Expression
}

// Call builds a call expression invoking the wrapped expression.
func (me Functor) Call(params ...Expression) CallFunction {
	return CallFunction{
		Func:   me.Func,
		Params: params,
	}
}

@ -0,0 +1,29 @@
package gogen
import "go/ast"
// If is an if statement with optional Init clause. There is no else branch.
type If struct {
	Init      Statement
	Condition Expression
	Body      []Statement
}

// Statement renders the conditional as an *ast.IfStmt.
func (me If) Statement() ast.Stmt {
	var (
		init ast.Stmt
	)
	if me.Init != nil {
		init = me.Init.Statement()
	}
	body := make([]ast.Stmt, len(me.Body))
	for j, stmt := range me.Body {
		body[j] = stmt.Statement()
	}
	return &ast.IfStmt{
		Init: init,
		Cond: me.Condition.Expression(),
		Body: &ast.BlockStmt{
			List: body,
		},
	}
}

@ -0,0 +1,32 @@
package gogen
import (
"fmt"
"go/ast"
"go/token"
)
// Import is an import declaration; Name is the import path.
type Import struct {
	Name string
}

// Declaration renders the import as an IMPORT GenDecl. The path is quoted
// with a plain %s format; paths containing quote characters would produce
// invalid code.
func (me Import) Declaration() ast.Decl {
	return &ast.GenDecl{
		Tok: token.IMPORT,
		Specs: []ast.Spec{
			&ast.ImportSpec{
				Path: &ast.BasicLit{
					Kind:  token.STRING,
					Value: fmt.Sprintf(`"%s"`, me.Name),
				},
			},
		},
	}
}

// Imports is a growable list of Import declarations.
type Imports []Import

// Add appends imp to the list.
func (me *Imports) Add(imp Import) {
	*me = append(*me, imp)
}

@ -0,0 +1,36 @@
package gogen
import (
"go/ast"
"go/format"
"go/token"
"io"
)
// Package is a Go source file under construction: a package name plus an
// ordered list of top-level declarations.
type Package struct {
	Name         string
	Declarations []Declaration
}

// Declare appends decl and returns the package for chaining.
func (me *Package) Declare(decl Declaration) *Package {
	me.Declarations = append(me.Declarations, decl)
	return me
}

// Ast assembles the declarations into an *ast.File.
func (me *Package) Ast() ast.Node {
	decls := make([]ast.Decl, len(me.Declarations))
	for i, decl := range me.Declarations {
		decls[i] = decl.Declaration()
	}
	return &ast.File{
		Name: &ast.Ident{
			Name: me.Name,
		},
		Decls: decls,
	}
}

// WriteTo formats the package with go/format and writes it to w.
// NOTE: despite the name, this does not implement io.WriterTo (it returns
// only an error).
func (me *Package) WriteTo(w io.Writer) error {
	fset := token.NewFileSet()
	return format.Node(w, fset, me.Ast())
}

@ -0,0 +1,46 @@
package gogen
import (
"go/ast"
"go/token"
)
// Range is a range statement: for Key, Value := range RangeValue { Body }.
// Nil Key/Value render as the blank identifier. DoNotDeclare switches the
// statement from := to =; plain assignment is also used when both Key and
// Value are nil.
type Range struct {
	Key          Expression
	Value        Expression
	RangeValue   Expression
	Body         []Statement
	DoNotDeclare bool
}

// Statement renders the loop as an *ast.RangeStmt.
func (me Range) Statement() ast.Stmt {
	body := make([]ast.Stmt, len(me.Body))
	for i, bodyPart := range me.Body {
		body[i] = bodyPart.Statement()
	}
	var (
		key   Expression = Var{"_"}
		value Expression = Var{"_"}
	)
	if me.Key != nil {
		key = me.Key
	}
	if me.Value != nil {
		value = me.Value
	}
	tok := token.DEFINE
	if me.DoNotDeclare || (me.Key == nil && me.Value == nil) {
		tok = token.ASSIGN
	}
	return &ast.RangeStmt{
		Key:   key.Expression(),
		Value: value.Expression(),
		X:     me.RangeValue.Expression(),
		Tok:   tok,
		Body: &ast.BlockStmt{
			List: body,
		},
	}
}

@ -0,0 +1,17 @@
package gogen
import "go/ast"
// Return is a return statement with zero or more result values.
type Return struct {
	Values []Expression
}

// Statement renders the return as an *ast.ReturnStmt.
func (me Return) Statement() ast.Stmt {
	ret := make([]ast.Expr, len(me.Values))
	for i, val := range me.Values {
		ret[i] = val.Expression()
	}
	return &ast.ReturnStmt{
		Results: ret,
	}
}

@ -0,0 +1,13 @@
package gogen
import "go/ast"
// Star is a dereference/pointer-type expression: *Value.
type Star struct {
	Value Expression
}

// Expression renders the star as an *ast.StarExpr.
func (me Star) Expression() ast.Expr {
	return &ast.StarExpr{
		X: me.Value.Expression(),
	}
}

@ -0,0 +1,7 @@
package gogen
import "go/ast"
// Statement is implemented by all values that can render themselves as a
// Go statement.
type Statement interface {
	Statement() ast.Stmt
}

@ -0,0 +1,141 @@
package gogen
// Standard library import paths, usable as Import{...} names.
// Fix: Log was "Log", which is not a valid import path; the log package's
// path is "log". (InternalSyscall and similar entries are kept verbatim.)
const (
	ArchiveTar = "archive/tar"
	ArchiveZip = "archive/zip"
	Bufio = "bufio"
	Builtin = "builtin"
	Bytes = "bytes"
	CompressBzip2 = "compress/bzip2"
	CompressFlate = "compress/flate"
	CompressGzip = "compress/gzip"
	CompressLzw = "compress/lzw"
	CompressZlib = "compress/zlib"
	ContainerHeap = "container/heap"
	ContainerList = "container/list"
	ContainerRing = "container/ring"
	Crypto = "crypto"
	CryptoAes = "crypto/aes"
	CryptoCipher = "crypto/cipher"
	CryptoDes = "crypto/des"
	CryptoDsa = "crypto/dsa"
	CryptoEcdsa = "crypto/ecdsa"
	CryptoElliptic = "crypto/elliptic"
	CryptoHmac = "crypto/hmac"
	CryptoMd5 = "crypto/md5"
	CryptoRand = "crypto/rand"
	CryptoRc4 = "crypto/rc4"
	CryptoRsa = "crypto/rsa"
	CryptoSha1 = "crypto/sha1"
	CryptoSha256 = "crypto/sha256"
	CryptoSha512 = "crypto/sha512"
	CryptoSubtle = "crypto/subtle"
	CryptoTls = "crypto/tls"
	CryptoX509 = "crypto/x509"
	CryptoX509Pkix = "crypto/x509/pkix"
	DatabaseSql = "database/sql"
	DatabaseSqlDriver = "database/sql/driver"
	DebugDwarf = "debug/dwarf"
	DebugElf = "debug/elf"
	DebugGosym = "debug/gosym"
	DebugMacho = "debug/macho"
	DebugPe = "debug/pe"
	DebugPlan9obj = "debug/plan9obj"
	Encoding = "encoding"
	EncodingAscii85 = "encoding/ascii85"
	EncodingAsn1 = "encoding/asn1"
	EncodingBase32 = "encoding/base32"
	EncodingBase64 = "encoding/base64"
	EncodingBinary = "encoding/binary"
	EncodingCsv = "encoding/csv"
	EncodingGob = "encoding/gob"
	EncodingHex = "encoding/hex"
	EncodingJson = "encoding/json"
	EncodingPem = "encoding/pem"
	EncodingXml = "encoding/xml"
	Errors = "errors"
	Expvar = "expvar"
	Flag = "flag"
	Fmt = "fmt"
	GoAst = "go/ast"
	GoBuild = "go/build"
	GoDoc = "go/doc"
	GoFormat = "go/format"
	GoParser = "go/parser"
	GoPrinter = "go/printer"
	GoScanner = "go/scanner"
	GoToken = "go/token"
	Hash = "hash"
	HashAdler32 = "hash/adler32"
	HashCrc32 = "hash/crc32"
	HashCrc64 = "hash/crc64"
	HashFnv = "hash/fnv"
	Html = "html"
	HtmlTemplate = "html/template"
	Image = "image"
	ImageColor = "image/color"
	ImageColorPalette = "image/color/palette"
	ImageDraw = "image/draw"
	ImageGif = "image/gif"
	ImageJpeg = "image/jpeg"
	ImagePng = "image/png"
	IndexSuffixarray = "index/suffixarray"
	InternalSyscall = "internal/syscall"
	Io = "io"
	IoIoutil = "io/ioutil"
	Log = "log" // was "Log", an invalid import path
	LogSyslog = "log/syslog"
	Math = "math"
	MathBig = "math/big"
	MathCmplx = "math/cmplx"
	MathRand = "math/rand"
	Mime = "mime"
	MimeMultipart = "mime/multipart"
	Net = "net"
	NetHttp = "net/http"
	NetHttpCgi = "net/http/cgi"
	NetHttpCookiejar = "net/http/cookiejar"
	NetHttpFcgi = "net/http/fcgi"
	NetHttpHttptest = "net/http/httptest"
	NetHttpHttputil = "net/http/httputil"
	NetHttpInternal = "net/http/internal"
	NetHttpPprof = "net/http/pprof"
	NetMail = "net/mail"
	NetRpc = "net/rpc"
	NetRpcJsonrpc = "net/rpc/jsonrpc"
	NetSmtp = "net/smtp"
	NetTextproto = "net/textproto"
	NetUrl = "net/url"
	Os = "os"
	OsExec = "os/exec"
	OsSignal = "os/signal"
	OsUser = "os/user"
	Path = "path"
	PathFilepath = "path/filepath"
	Reflect = "reflect"
	Regexp = "regexp"
	RegexpSyntax = "regexp/syntax"
	Runtime = "runtime"
	RuntimeCgo = "runtime/cgo"
	RuntimeDebug = "runtime/debug"
	RuntimePprof = "runtime/pprof"
	RuntimeRace = "runtime/race"
	Sort = "sort"
	Strconv = "strconv"
	Strings = "strings"
	Sync = "sync"
	SyncAtomic = "sync/atomic"
	Syscall = "syscall"
	Testing = "testing"
	TestingIotest = "testing/iotest"
	TestingQuick = "testing/quick"
	TextScanner = "text/scanner"
	TextTabwriter = "text/tabwriter"
	TextTemplate = "text/template"
	TextTemplateParse = "text/template/parse"
	Time = "time"
	Unicode = "unicode"
	UnicodeUtf16 = "unicode/utf16"
	UnicodeUtf8 = "unicode/utf8"
	Unsafe = "unsafe"
)

@ -0,0 +1,115 @@
package gogen
import (
"go/ast"
"go/token"
)
// Field is a struct field with optional name (anonymous/embedded fields
// have Name == "") and optional struct tag (quoted with backticks).
type Field struct {
	Name     string
	TypeName string
	Tag      string
}

// Ast renders the field as an *ast.Field; the tag is emitted only when set.
func (me Field) Ast() *ast.Field {
	var tag *ast.BasicLit
	if me.Tag != "" {
		tag = &ast.BasicLit{
			Kind:  token.STRING,
			Value: "`" + me.Tag + "`",
		}
	}
	names := []*ast.Ident{}
	if me.Name != "" {
		names = []*ast.Ident{
			&ast.Ident{
				Name: me.Name,
				Obj: &ast.Object{
					Kind: ast.Var,
					Name: me.Name,
				},
			},
		}
	}
	return &ast.Field{
		Names: names,
		Type: &ast.Ident{
			Name: me.TypeName,
		},
		Tag: tag,
	}
}
// Fields is a list of struct fields.
type Fields []Field

// Ast renders the list as an *ast.FieldList.
func (me Fields) Ast() *ast.FieldList {
	fields := make([]*ast.Field, len(me))
	for i, field := range me {
		fields[i] = field.Ast()
	}
	return &ast.FieldList{
		List: fields,
	}
}
// Struct describes a named struct type, its fields and (unused here) its
// methods and composite-literal field values.
type Struct struct {
	Name        string
	Fields      Fields
	Methods     Functions
	FieldValues map[string]Expression
}

// Declaration renders the struct as a TYPE GenDecl.
func (me Struct) Declaration() ast.Decl {
	return &ast.GenDecl{
		Tok: token.TYPE,
		Specs: []ast.Spec{
			&ast.TypeSpec{
				Name: &ast.Ident{
					Name: me.Name,
					Obj: &ast.Object{
						Kind: ast.Typ,
						Name: me.Name,
					},
				},
				Type: &ast.StructType{
					Fields: me.Fields.Ast(),
				},
			},
		},
	}
}

// WithValues returns a copy of the struct with FieldValues replaced.
func (me Struct) WithValues(vals map[string]Expression) Struct {
	return Struct{
		Name:        me.Name,
		Fields:      me.Fields,
		Methods:     me.Methods,
		FieldValues: vals,
	}
}

// Expression renders a composite literal of the struct type.
//
// NOTE(review): the value side of each key/value pair is an empty
// *ast.Ident and FieldValues is never consulted (the line using it is
// commented out below) — this method appears incomplete in the vendored
// upstream.
func (me Struct) Expression() ast.Expr {
	elts := make([]ast.Expr, len(me.Fields))
	for i, field := range me.Fields {
		elts[i] = &ast.KeyValueExpr{
			Key: &ast.Ident{
				Name: field.Name,
			},
			Value: &ast.Ident{
				//Value: me.FieldValues[field.Name].Expression(),
			},
		}
	}
	return &ast.CompositeLit{
		Type: &ast.Ident{
			Name: me.Name,
		},
		Elts: elts,
	}
}
// Structs is a growable list of Struct declarations.
type Structs []Struct

// Add appends st to the list.
func (me *Structs) Add(st Struct) {
	*me = append(*me, st)
}

@ -0,0 +1,21 @@
package gogen
import "go/ast"
// Thunk wraps pre-built go/ast nodes so they can be used wherever the
// gogen Expression/Statement/Declaration interfaces are expected.
type Thunk struct {
	Expr ast.Expr
	Stmt ast.Stmt
	Decl ast.Decl
}

// Expression returns the wrapped expression node.
func (me Thunk) Expression() ast.Expr {
	return me.Expr
}

// Statement returns the wrapped statement node.
func (me Thunk) Statement() ast.Stmt {
	return me.Stmt
}

// Declaration returns the wrapped declaration node.
func (me Thunk) Declaration() ast.Decl {
	return me.Decl
}

@ -0,0 +1,9 @@
package gogen
// Type names a (possibly named) type used in parameter and result lists.
type Type struct {
	Name        string // Optional, named type
	TypeName    string
	PackageName string // Optional
}

// Types is a list of Type entries.
type Types []Type

@ -0,0 +1,6 @@
package gogen
// Convenience names for common builtin types.
const (
	StringT = "string"
	IntT = "int"
)

@ -0,0 +1,61 @@
package gogen
import (
"go/ast"
"go/token"
)
// Increment is the statement Value++.
type Increment struct {
	Value Expression
}

// Statement renders the increment as an *ast.IncDecStmt (token.INC).
func (me Increment) Statement() ast.Stmt {
	return &ast.IncDecStmt{
		X:   me.Value.Expression(),
		Tok: token.INC,
	}
}

// Decrement is the statement Value--.
type Decrement struct {
	Value Expression
}

// Statement renders the decrement as an *ast.IncDecStmt (token.DEC).
func (me Decrement) Statement() ast.Stmt {
	return &ast.IncDecStmt{
		X:   me.Value.Expression(),
		Tok: token.DEC,
	}
}

// Not is the logical negation !Value.
type Not struct {
	Value Expression
}

// Expression renders the negation as a unary expression (token.NOT).
func (me Not) Expression() ast.Expr {
	return &ast.UnaryExpr{
		X:  me.Value.Expression(),
		Op: token.NOT,
	}
}

// AddressOf is the address operation &Value.
type AddressOf struct {
	Value Expression
}

// Expression renders the operation as a unary expression (token.AND).
func (me AddressOf) Expression() ast.Expr {
	return &ast.UnaryExpr{
		X:  me.Value.Expression(),
		Op: token.AND,
	}
}

// Index is the indexing expression Value[Index].
type Index struct {
	Value, Index Expression
}

// Expression renders the indexing as an *ast.IndexExpr.
func (me Index) Expression() ast.Expr {
	return &ast.IndexExpr{
		X:     me.Value.Expression(),
		Index: me.Index.Expression(),
	}
}

@ -0,0 +1,44 @@
package gogen
import (
"fmt"
"go/ast"
"strconv"
)
// Var is a bare identifier expression. Note that Name is emitted verbatim,
// so callers also use it for literal source text (e.g. `"Hello World!"`).
type Var struct {
	Name string
}

// Expression renders the identifier as an *ast.Ident.
func (me Var) Expression() ast.Expr {
	return &ast.Ident{
		Name: me.Name,
		Obj: &ast.Object{
			Kind: ast.Var,
			Name: me.Name,
		},
	}
}

// Things that are like Var but either deserve their own name, or have
// slightly different behaviors

// String is a double-quoted string literal. The value is interpolated with
// a plain %s format, so strings containing quote characters would produce
// invalid code.
type String struct {
	Value string
}

// Expression renders the quoted literal.
func (me String) Expression() ast.Expr {
	return Var{fmt.Sprintf(`"%s"`, me.Value)}.Expression()
}

// Int returns a Var holding the decimal representation of value.
func Int(value int) Var {
	return Var{strconv.Itoa(value)}
}

// Pkg returns a Var naming a package.
func Pkg(value string) Var {
	return Var{value}
}

// Name returns a Var naming an identifier.
func Name(value string) Var {
	return Var{value}
}

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@ -0,0 +1,186 @@
// Copyright 2013 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package diff implements a linewise diff algorithm.
package diff
import (
"bytes"
"fmt"
"strings"
)
// Chunk represents a piece of the diff. A chunk will not have both added and
// deleted lines. Equal lines are always after any added or deleted lines.
// A Chunk may or may not have any lines in it, especially for the first or last
// chunk in a computation.
type Chunk struct {
	Added   []string
	Deleted []string
	Equal   []string
}

// empty reports whether the chunk carries no lines at all.
func (c *Chunk) empty() bool {
	if len(c.Added) > 0 {
		return false
	}
	if len(c.Deleted) > 0 {
		return false
	}
	return len(c.Equal) == 0
}
// Diff returns a string containing a line-by-line unified diff of the linewise
// changes required to make A into B. Each line is prefixed with '+', '-', or
// ' ' to indicate if it should be added, removed, or is correct respectively.
func Diff(A, B string) string {
	chunks := DiffChunks(strings.Split(A, "\n"), strings.Split(B, "\n"))

	buf := new(bytes.Buffer)
	// emit writes each line with the given diff prefix.
	emit := func(prefix string, lines []string) {
		for _, line := range lines {
			fmt.Fprintf(buf, "%s%s\n", prefix, line)
		}
	}
	for _, c := range chunks {
		emit("+", c.Added)
		emit("-", c.Deleted)
		emit(" ", c.Equal)
	}
	return strings.TrimRight(buf.String(), "\n")
}
// DiffChunks uses an O(D(N+M)) shortest-edit-script algorithm
// to compute the edits required from A to B and returns the
// edit chunks.
func DiffChunks(a, b []string) []Chunk {
	// algorithm: http://www.xmailserver.org/diff2.pdf

	// We'll need these quantities a lot.
	alen, blen := len(a), len(b) // M, N

	// At most, it will require len(a) deletions and len(b) additions
	// to transform a into b.
	maxPath := alen + blen // MAX
	if maxPath == 0 {
		// degenerate case: two empty lists are the same
		return nil
	}

	// Store the endpoint of the path for diagonals.
	// We store only the a index, because the b index on any diagonal
	// (which we know during the loop below) is aidx-diag.
	// endpoint[maxPath] represents the 0 diagonal.
	//
	// Stated differently:
	// endpoint[d] contains the aidx of a furthest reaching path in diagonal d
	endpoint := make([]int, 2*maxPath+1) // V

	// saved snapshots endpoint after each edit-distance iteration so the
	// backtracking pass below can reconstruct the path.
	saved := make([][]int, 0, 8) // Vs
	save := func() {
		dup := make([]int, len(endpoint))
		copy(dup, endpoint)
		saved = append(saved, dup)
	}

	var editDistance int // D
dLoop:
	for editDistance = 0; editDistance <= maxPath; editDistance++ {
		// The 0 diag(onal) represents equality of a and b. Each diagonal to
		// the left is numbered one lower, to the right is one higher, from
		// -alen to +blen. Negative diagonals favor differences from a,
		// positive diagonals favor differences from b. The edit distance to a
		// diagonal d cannot be shorter than d itself.
		//
		// The iterations of this loop cover either odds or evens, but not both,
		// If odd indices are inputs, even indices are outputs and vice versa.
		for diag := -editDistance; diag <= editDistance; diag += 2 { // k
			var aidx int // x
			switch {
			case diag == -editDistance:
				// This is a new diagonal; copy from previous iter
				aidx = endpoint[maxPath-editDistance+1] + 0
			case diag == editDistance:
				// This is a new diagonal; copy from previous iter
				aidx = endpoint[maxPath+editDistance-1] + 1
			case endpoint[maxPath+diag+1] > endpoint[maxPath+diag-1]:
				// diagonal d+1 was farther along, so use that
				aidx = endpoint[maxPath+diag+1] + 0
			default:
				// diagonal d-1 was farther (or the same), so use that
				aidx = endpoint[maxPath+diag-1] + 1
			}
			// On diagonal d, we can compute bidx from aidx.
			bidx := aidx - diag // y
			// See how far we can go on this diagonal before we find a difference.
			for aidx < alen && bidx < blen && a[aidx] == b[bidx] {
				aidx++
				bidx++
			}
			// Store the end of the current edit chain.
			endpoint[maxPath+diag] = aidx
			// If we've found the end of both inputs, we're done!
			if aidx >= alen && bidx >= blen {
				save() // save the final path
				break dLoop
			}
		}
		save() // save the current path
	}
	if editDistance == 0 {
		return nil
	}
	chunks := make([]Chunk, editDistance+1)

	// Backtrack from (alen, blen) to the origin through the saved
	// endpoint snapshots, emitting one chunk per edit-distance step.
	x, y := alen, blen
	for d := editDistance; d > 0; d-- {
		endpoint := saved[d]
		diag := x - y
		// insert is true when this step consumed a line of b (addition);
		// otherwise it consumed a line of a (deletion).
		insert := diag == -d || (diag != d && endpoint[maxPath+diag-1] < endpoint[maxPath+diag+1])

		x1 := endpoint[maxPath+diag]
		var x0, xM, kk int
		if insert {
			kk = diag + 1
			x0 = endpoint[maxPath+kk]
			xM = x0
		} else {
			kk = diag - 1
			x0 = endpoint[maxPath+kk]
			xM = x0 + 1
		}
		y0 := x0 - kk

		var c Chunk
		if insert {
			c.Added = b[y0:][:1]
		} else {
			c.Deleted = a[x0:][:1]
		}
		if xM < x1 {
			// The trailing run of matching lines for this step.
			c.Equal = a[xM:][:x1-xM]
		}

		x, y = x0, y0
		chunks[d] = c
	}

	// Any remaining prefix of a is common to both inputs.
	if x > 0 {
		chunks[0].Equal = a[:x]
	}
	if chunks[0].empty() {
		chunks = chunks[1:]
	}
	if len(chunks) == 0 {
		return nil
	}
	return chunks
}

27
vendor/golang.org/x/tools/LICENSE generated vendored

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

22
vendor/golang.org/x/tools/PATENTS generated vendored

@ -0,0 +1,22 @@
Additional IP Rights Grant (Patents)
"This implementation" means the copyrightable works distributed by
Google as part of the Go project.
Google hereby grants to You a perpetual, worldwide, non-exclusive,
no-charge, royalty-free, irrevocable (except as stated in this section)
patent license to make, have made, use, offer to sell, sell, import,
transfer and otherwise run, modify and propagate the contents of this
implementation of Go, where such license applies only to those patent
claims, both currently owned or controlled by Google and acquired in
the future, licensable by Google that are necessarily infringed by this
implementation of Go. This grant does not include claims that would be
infringed only as a consequence of further modification of this
implementation. If you or your agent or exclusive licensee institute or
order or agree to the institution of patent litigation against any
entity (including a cross-claim or counterclaim in a lawsuit) alleging
that this implementation of Go or any code incorporated within this
implementation of Go constitutes direct or contributory patent
infringement, or inducement of patent infringement, then any patent
rights granted to you under this License for this implementation of Go
shall terminate as of the date such litigation is filed.

@ -0,0 +1,627 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
// This file defines utilities for working with source positions.
import (
"fmt"
"go/ast"
"go/token"
"sort"
)
// PathEnclosingInterval returns the node that encloses the source
// interval [start, end), and all its ancestors up to the AST root.
//
// The definition of "enclosing" used by this function considers
// additional whitespace abutting a node to be enclosed by it.
// In this example:
//
// z := x + y // add them
// <-A->
// <----B----->
//
// the ast.BinaryExpr(+) node is considered to enclose interval B
// even though its [Pos()..End()) is actually only interval A.
// This behaviour makes user interfaces more tolerant of imperfect
// input.
//
// This function treats tokens as nodes, though they are not included
// in the result. e.g. PathEnclosingInterval("+") returns the
// enclosing ast.BinaryExpr("x + y").
//
// If start==end, the 1-char interval following start is used instead.
//
// The 'exact' result is true if the interval contains only path[0]
// and perhaps some adjacent whitespace. It is false if the interval
// overlaps multiple children of path[0], or if it contains only
// interior whitespace of path[0].
// In this example:
//
// z := x + y // add them
// <--C--> <---E-->
// ^
// D
//
// intervals C, D and E are inexact. C is contained by the
// z-assignment statement, because it spans three of its children (:=,
// x, +). So too is the 1-char interval D, because it contains only
// interior whitespace of the assignment. E is considered interior
// whitespace of the BlockStmt containing the assignment.
//
// Precondition: [start, end) both lie within the same file as root.
// TODO(adonovan): return (nil, false) in this case and remove precond.
// Requires FileSet; see loader.tokenFileContainsPos.
//
// Postcondition: path is never nil; it always contains at least 'root'.
//
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
	// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging

	// visit recursively narrows the enclosing node, appending each
	// candidate to path on the way down; path is reversed afterwards
	// so that it runs leaf-to-root.
	// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
	var visit func(node ast.Node) bool
	visit = func(node ast.Node) bool {
		path = append(path, node)

		nodePos := node.Pos()
		nodeEnd := node.End()

		// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging

		// Intersect [start, end) with interval of node.
		if start < nodePos {
			start = nodePos
		}
		if end > nodeEnd {
			end = nodeEnd
		}

		// Find sole child that contains [start, end).
		children := childrenOf(node)
		l := len(children)
		for i, child := range children {
			// [childPos, childEnd) is unaugmented interval of child.
			childPos := child.Pos()
			childEnd := child.End()

			// [augPos, augEnd) is whitespace-augmented interval of child.
			augPos := childPos
			augEnd := childEnd
			if i > 0 {
				augPos = children[i-1].End() // start of preceding whitespace
			}
			if i < l-1 {
				nextChildPos := children[i+1].Pos()
				// Does [start, end) lie between child and next child?
				if start >= augEnd && end <= nextChildPos {
					return false // inexact match
				}
				augEnd = nextChildPos // end of following whitespace
			}

			// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
			// 	i, augPos, augEnd, start, end) // debugging

			// Does augmented child strictly contain [start, end)?
			if augPos <= start && end <= augEnd {
				// Bare tokens are not descended into; they terminate the path.
				_, isToken := child.(tokenNode)
				return isToken || visit(child)
			}

			// Does [start, end) overlap multiple children?
			// i.e. left-augmented child contains start
			// but LR-augmented child does not contain end.
			if start < childEnd && end > augEnd {
				break
			}
		}

		// No single child contained [start, end),
		// so node is the result. Is it exact?

		// (It's tempting to put this condition before the
		// child loop, but it gives the wrong result in the
		// case where a node (e.g. ExprStmt) and its sole
		// child have equal intervals.)
		if start == nodePos && end == nodeEnd {
			return true // exact match
		}

		return false // inexact: overlaps multiple children
	}

	// Normalize a backwards interval.
	if start > end {
		start, end = end, start
	}

	if start < root.End() && end > root.Pos() {
		if start == end {
			end = start + 1 // empty interval => interval of size 1
		}
		exact = visit(root)

		// Reverse the path:
		for i, l := 0, len(path); i < l/2; i++ {
			path[i], path[l-1-i] = path[l-1-i], path[i]
		}
	} else {
		// Selection lies within whitespace preceding the
		// first (or following the last) declaration in the file.
		// The result nonetheless always includes the ast.File.
		path = append(path, root)
	}

	return
}
// tokenNode is a dummy implementation of ast.Node for a single token.
// They are used transiently by PathEnclosingInterval but never escape
// this package.
//
type tokenNode struct {
	pos token.Pos
	end token.Pos
}

// Pos returns the token's starting position.
func (n tokenNode) Pos() token.Pos { return n.pos }

// End returns the position immediately past the token.
func (n tokenNode) End() token.Pos { return n.end }

// tok builds a tokenNode covering len bytes starting at pos.
func tok(pos token.Pos, len int) ast.Node {
	end := pos + token.Pos(len)
	return tokenNode{pos: pos, end: end}
}
// childrenOf returns the direct non-nil children of ast.Node n,
// sorted by position.
// It may include fake ast.Node implementations for bare tokens.
// It is not safe to call (e.g.) ast.Walk on such nodes.
//
func childrenOf(n ast.Node) []ast.Node {
	var children []ast.Node

	// First add nodes for all true subtrees.
	ast.Inspect(n, func(node ast.Node) bool {
		if node == n { // push n
			return true // recur
		}
		if node != nil { // push child
			children = append(children, node)
		}
		return false // no recursion
	})

	// Then add fake Nodes for bare tokens.
	switch n := n.(type) {
	case *ast.ArrayType:
		children = append(children,
			tok(n.Lbrack, len("[")),
			tok(n.Elt.End(), len("]")))

	case *ast.AssignStmt:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.BasicLit:
		children = append(children,
			tok(n.ValuePos, len(n.Value)))

	case *ast.BinaryExpr:
		children = append(children, tok(n.OpPos, len(n.Op.String())))

	case *ast.BlockStmt:
		children = append(children,
			tok(n.Lbrace, len("{")),
			tok(n.Rbrace, len("}")))

	case *ast.BranchStmt:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.CallExpr:
		children = append(children,
			tok(n.Lparen, len("(")),
			tok(n.Rparen, len(")")))
		if n.Ellipsis != 0 {
			children = append(children, tok(n.Ellipsis, len("...")))
		}

	case *ast.CaseClause:
		if n.List == nil {
			children = append(children,
				tok(n.Case, len("default")))
		} else {
			children = append(children,
				tok(n.Case, len("case")))
		}
		children = append(children, tok(n.Colon, len(":")))

	case *ast.ChanType:
		switch n.Dir {
		case ast.RECV:
			children = append(children, tok(n.Begin, len("<-chan")))
		case ast.SEND:
			children = append(children, tok(n.Begin, len("chan<-")))
		case ast.RECV | ast.SEND:
			children = append(children, tok(n.Begin, len("chan")))
		}

	case *ast.CommClause:
		if n.Comm == nil {
			children = append(children,
				tok(n.Case, len("default")))
		} else {
			children = append(children,
				tok(n.Case, len("case")))
		}
		children = append(children, tok(n.Colon, len(":")))

	case *ast.Comment:
		// nop

	case *ast.CommentGroup:
		// nop

	case *ast.CompositeLit:
		children = append(children,
			tok(n.Lbrace, len("{")),
			tok(n.Rbrace, len("}")))

	case *ast.DeclStmt:
		// nop

	case *ast.DeferStmt:
		children = append(children,
			tok(n.Defer, len("defer")))

	case *ast.Ellipsis:
		children = append(children,
			tok(n.Ellipsis, len("...")))

	case *ast.EmptyStmt:
		// nop

	case *ast.ExprStmt:
		// nop

	case *ast.Field:
		// TODO(adonovan): Field.{Doc,Comment,Tag}?

	case *ast.FieldList:
		children = append(children,
			tok(n.Opening, len("(")),
			tok(n.Closing, len(")")))

	case *ast.File:
		// TODO test: Doc
		children = append(children,
			tok(n.Package, len("package")))

	case *ast.ForStmt:
		children = append(children,
			tok(n.For, len("for")))

	case *ast.FuncDecl:
		// TODO(adonovan): FuncDecl.Comment?

		// Uniquely, FuncDecl breaks the invariant that
		// preorder traversal yields tokens in lexical order:
		// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
		//
		// As a workaround, we inline the case for FuncType
		// here and order things correctly.
		//
		children = nil // discard ast.Walk(FuncDecl) info subtrees
		children = append(children, tok(n.Type.Func, len("func")))
		if n.Recv != nil {
			children = append(children, n.Recv)
		}
		children = append(children, n.Name)
		if n.Type.Params != nil {
			children = append(children, n.Type.Params)
		}
		if n.Type.Results != nil {
			children = append(children, n.Type.Results)
		}
		if n.Body != nil {
			children = append(children, n.Body)
		}

	case *ast.FuncLit:
		// nop

	case *ast.FuncType:
		if n.Func != 0 {
			children = append(children,
				tok(n.Func, len("func")))
		}

	case *ast.GenDecl:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))
		if n.Lparen != 0 {
			children = append(children,
				tok(n.Lparen, len("(")),
				tok(n.Rparen, len(")")))
		}

	case *ast.GoStmt:
		children = append(children,
			tok(n.Go, len("go")))

	case *ast.Ident:
		children = append(children,
			tok(n.NamePos, len(n.Name)))

	case *ast.IfStmt:
		children = append(children,
			tok(n.If, len("if")))

	case *ast.ImportSpec:
		// TODO(adonovan): ImportSpec.{Doc,EndPos}?

	case *ast.IncDecStmt:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.IndexExpr:
		children = append(children,
			tok(n.Lbrack, len("[")),
			tok(n.Rbrack, len("]")))

	case *ast.InterfaceType:
		children = append(children,
			tok(n.Interface, len("interface")))

	case *ast.KeyValueExpr:
		children = append(children,
			tok(n.Colon, len(":")))

	case *ast.LabeledStmt:
		children = append(children,
			tok(n.Colon, len(":")))

	case *ast.MapType:
		children = append(children,
			tok(n.Map, len("map")))

	case *ast.ParenExpr:
		children = append(children,
			tok(n.Lparen, len("(")),
			tok(n.Rparen, len(")")))

	case *ast.RangeStmt:
		children = append(children,
			tok(n.For, len("for")),
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.ReturnStmt:
		children = append(children,
			tok(n.Return, len("return")))

	case *ast.SelectStmt:
		children = append(children,
			tok(n.Select, len("select")))

	case *ast.SelectorExpr:
		// nop

	case *ast.SendStmt:
		children = append(children,
			tok(n.Arrow, len("<-")))

	case *ast.SliceExpr:
		children = append(children,
			tok(n.Lbrack, len("[")),
			tok(n.Rbrack, len("]")))

	case *ast.StarExpr:
		children = append(children, tok(n.Star, len("*")))

	case *ast.StructType:
		children = append(children, tok(n.Struct, len("struct")))

	case *ast.SwitchStmt:
		children = append(children, tok(n.Switch, len("switch")))

	case *ast.TypeAssertExpr:
		children = append(children,
			tok(n.Lparen-1, len(".")),
			tok(n.Lparen, len("(")),
			tok(n.Rparen, len(")")))

	case *ast.TypeSpec:
		// TODO(adonovan): TypeSpec.{Doc,Comment}?

	case *ast.TypeSwitchStmt:
		children = append(children, tok(n.Switch, len("switch")))

	case *ast.UnaryExpr:
		children = append(children, tok(n.OpPos, len(n.Op.String())))

	case *ast.ValueSpec:
		// TODO(adonovan): ValueSpec.{Doc,Comment}?

	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
		// nop
	}

	// TODO(adonovan): opt: merge the logic of ast.Inspect() into
	// the switch above so we can make interleaved callbacks for
	// both Nodes and Tokens in the right order and avoid the need
	// to sort.
	sort.Sort(byPos(children))

	return children
}
// byPos sorts ast.Nodes by their starting position.
type byPos []ast.Node

func (sl byPos) Len() int           { return len(sl) }
func (sl byPos) Less(i, j int) bool { return sl[i].Pos() < sl[j].Pos() }
func (sl byPos) Swap(i, j int)      { sl[i], sl[j] = sl[j], sl[i] }
// NodeDescription returns a description of the concrete type of n suitable
// for a user interface.
//
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root.  Perhaps we should do that.
//
func NodeDescription(n ast.Node) string {
	switch n := n.(type) {
	case *ast.ArrayType:
		return "array type"
	case *ast.AssignStmt:
		return "assignment"
	case *ast.BadDecl:
		return "bad declaration"
	case *ast.BadExpr:
		return "bad expression"
	case *ast.BadStmt:
		return "bad statement"
	case *ast.BasicLit:
		return "basic literal"
	case *ast.BinaryExpr:
		return fmt.Sprintf("binary %s operation", n.Op)
	case *ast.BlockStmt:
		return "block"
	case *ast.BranchStmt:
		// Describe by keyword; an unknown token falls through to the
		// panic at the bottom.
		switch n.Tok {
		case token.BREAK:
			return "break statement"
		case token.CONTINUE:
			return "continue statement"
		case token.GOTO:
			return "goto statement"
		case token.FALLTHROUGH:
			return "fall-through statement"
		}
	case *ast.CallExpr:
		if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
			return "function call (or conversion)"
		}
		return "function call"
	case *ast.CaseClause:
		return "case clause"
	case *ast.ChanType:
		return "channel type"
	case *ast.CommClause:
		return "communication clause"
	case *ast.Comment:
		return "comment"
	case *ast.CommentGroup:
		return "comment group"
	case *ast.CompositeLit:
		return "composite literal"
	case *ast.DeclStmt:
		return NodeDescription(n.Decl) + " statement"
	case *ast.DeferStmt:
		return "defer statement"
	case *ast.Ellipsis:
		return "ellipsis"
	case *ast.EmptyStmt:
		return "empty statement"
	case *ast.ExprStmt:
		return "expression statement"
	case *ast.Field:
		// Can be any of these:
		// struct {x, y int}  -- struct field(s)
		// struct {T}         -- anon struct field
		// interface {I}      -- interface embedding
		// interface {f()}    -- interface method
		// func (A) func(B) C -- receiver, param(s), result(s)
		return "field/method/parameter"
	case *ast.FieldList:
		return "field/method/parameter list"
	case *ast.File:
		return "source file"
	case *ast.ForStmt:
		return "for loop"
	case *ast.FuncDecl:
		return "function declaration"
	case *ast.FuncLit:
		return "function literal"
	case *ast.FuncType:
		return "function type"
	case *ast.GenDecl:
		// Describe by declaration keyword; an unknown token falls
		// through to the panic at the bottom.
		switch n.Tok {
		case token.IMPORT:
			return "import declaration"
		case token.CONST:
			return "constant declaration"
		case token.TYPE:
			return "type declaration"
		case token.VAR:
			return "variable declaration"
		}
	case *ast.GoStmt:
		return "go statement"
	case *ast.Ident:
		return "identifier"
	case *ast.IfStmt:
		return "if statement"
	case *ast.ImportSpec:
		return "import specification"
	case *ast.IncDecStmt:
		if n.Tok == token.INC {
			return "increment statement"
		}
		return "decrement statement"
	case *ast.IndexExpr:
		return "index expression"
	case *ast.InterfaceType:
		return "interface type"
	case *ast.KeyValueExpr:
		return "key/value association"
	case *ast.LabeledStmt:
		return "statement label"
	case *ast.MapType:
		return "map type"
	case *ast.Package:
		return "package"
	case *ast.ParenExpr:
		return "parenthesized " + NodeDescription(n.X)
	case *ast.RangeStmt:
		return "range loop"
	case *ast.ReturnStmt:
		return "return statement"
	case *ast.SelectStmt:
		return "select statement"
	case *ast.SelectorExpr:
		return "selector"
	case *ast.SendStmt:
		return "channel send"
	case *ast.SliceExpr:
		return "slice expression"
	case *ast.StarExpr:
		return "*-operation" // load/store expr or pointer type
	case *ast.StructType:
		return "struct type"
	case *ast.SwitchStmt:
		return "switch statement"
	case *ast.TypeAssertExpr:
		return "type assertion"
	case *ast.TypeSpec:
		return "type specification"
	case *ast.TypeSwitchStmt:
		return "type switch"
	case *ast.UnaryExpr:
		return fmt.Sprintf("unary %s operation", n.Op)
	case *ast.ValueSpec:
		return "value specification"
	}
	// Any node type not handled above indicates a malformed AST.
	panic(fmt.Sprintf("unexpected node type: %T", n))
}

@ -0,0 +1,450 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package astutil contains common utilities for working with the Go AST.
package astutil // import "golang.org/x/tools/go/ast/astutil"
import (
"fmt"
"go/ast"
"go/token"
"strconv"
"strings"
)
// AddImport adds the import path to the file f, if absent.
// It reports whether f was modified (false means the path was already
// imported). It is shorthand for AddNamedImport with an empty name.
func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
	return AddNamedImport(fset, f, "", ipath)
}
// AddNamedImport adds the import path to the file f, if absent.
// If name is not empty, it is used to rename the import.
//
// For example, calling
//	AddNamedImport(fset, f, "pathpkg", "path")
// adds
//	import pathpkg "path"
//
// It reports whether the import was added (false if ipath was already
// imported). As a side effect, all import declarations other than the
// first (and `import "C"`) are merged into the first declaration.
func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
	if imports(f, ipath) {
		return false
	}

	newImport := &ast.ImportSpec{
		Path: &ast.BasicLit{
			Kind:  token.STRING,
			Value: strconv.Quote(ipath),
		},
	}
	if name != "" {
		newImport.Name = &ast.Ident{Name: name}
	}

	// Find an import decl to add to.
	// The goal is to find an existing import
	// whose import path has the longest shared
	// prefix with ipath.
	var (
		bestMatch  = -1          // length of longest shared prefix
		lastImport = -1          // index in f.Decls of the file's final import decl
		impDecl    *ast.GenDecl  // import decl containing the best match
		impIndex   = -1          // spec index in impDecl containing the best match
	)
	for i, decl := range f.Decls {
		gen, ok := decl.(*ast.GenDecl)
		if ok && gen.Tok == token.IMPORT {
			lastImport = i
			// Do not add to import "C", to avoid disrupting the
			// association with its doc comment, breaking cgo.
			if declImports(gen, "C") {
				continue
			}

			// Match an empty import decl if that's all that is available.
			if len(gen.Specs) == 0 && bestMatch == -1 {
				impDecl = gen
			}

			// Compute longest shared prefix with imports in this group.
			for j, spec := range gen.Specs {
				impspec := spec.(*ast.ImportSpec)
				n := matchLen(importPath(impspec), ipath)
				if n > bestMatch {
					bestMatch = n
					impDecl = gen
					impIndex = j
				}
			}
		}
	}

	// If no import decl found, add one after the last import.
	if impDecl == nil {
		impDecl = &ast.GenDecl{
			Tok: token.IMPORT,
		}
		if lastImport >= 0 {
			impDecl.TokPos = f.Decls[lastImport].End()
		} else {
			// There are no existing imports.
			// Our new import goes after the package declaration and after
			// the comment, if any, that starts on the same line as the
			// package declaration.
			impDecl.TokPos = f.Package

			file := fset.File(f.Package)
			pkgLine := file.Line(f.Package)
			for _, c := range f.Comments {
				if file.Line(c.Pos()) > pkgLine {
					break
				}
				impDecl.TokPos = c.End()
			}
		}
		// Insert impDecl into f.Decls at index lastImport+1.
		f.Decls = append(f.Decls, nil)
		copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
		f.Decls[lastImport+1] = impDecl
	}

	// Insert new import at insertAt.
	insertAt := 0
	if impIndex >= 0 {
		// insert after the found import
		insertAt = impIndex + 1
	}
	impDecl.Specs = append(impDecl.Specs, nil)
	copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
	impDecl.Specs[insertAt] = newImport
	pos := impDecl.Pos()
	if insertAt > 0 {
		// If there is a comment after an existing import, preserve the comment
		// position by adding the new import after the comment.
		if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
			pos = spec.Comment.End()
		} else {
			// Assign same position as the previous import,
			// so that the sorter sees it as being in the same block.
			pos = impDecl.Specs[insertAt-1].Pos()
		}
	}
	if newImport.Name != nil {
		newImport.Name.NamePos = pos
	}
	newImport.Path.ValuePos = pos
	newImport.EndPos = pos

	// Clean up parens. impDecl contains at least one spec.
	if len(impDecl.Specs) == 1 {
		// Remove unneeded parens.
		impDecl.Lparen = token.NoPos
	} else if !impDecl.Lparen.IsValid() {
		// impDecl needs parens added.
		impDecl.Lparen = impDecl.Specs[0].Pos()
	}

	f.Imports = append(f.Imports, newImport)

	if len(f.Decls) <= 1 {
		return true
	}

	// Merge all the import declarations into the first one.
	var first *ast.GenDecl
	for i := 0; i < len(f.Decls); i++ {
		decl := f.Decls[i]
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
			continue
		}
		if first == nil {
			first = gen
			continue // Don't touch the first one.
		}
		// We now know there is more than one package in this import
		// declaration. Ensure that it ends up parenthesized.
		first.Lparen = first.Pos()
		// Move the imports of the other import declaration to the first one.
		for _, spec := range gen.Specs {
			spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
			first.Specs = append(first.Specs, spec)
		}
		// Remove the now-empty declaration and re-examine index i.
		f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
		i--
	}

	return true
}
// DeleteImport deletes the import path from the file f, if present.
// It reports whether an import was deleted. It is shorthand for
// DeleteNamedImport with an empty name.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
	return DeleteNamedImport(fset, f, "", path)
}
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
// It removes matching specs from f.Decls (deleting a decl that becomes
// empty), drops their associated comments from f.Comments, removes the
// specs from f.Imports, and merges source lines in fset to close any
// hole left by the deletion.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
	var delspecs []*ast.ImportSpec
	var delcomments []*ast.CommentGroup

	// Find the import nodes that import path, if any.
	for i := 0; i < len(f.Decls); i++ {
		decl := f.Decls[i]
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT {
			continue
		}
		for j := 0; j < len(gen.Specs); j++ {
			spec := gen.Specs[j]
			impspec := spec.(*ast.ImportSpec)
			// The spec matches only if both name and path agree.
			if impspec.Name == nil && name != "" {
				continue
			}
			if impspec.Name != nil && impspec.Name.Name != name {
				continue
			}
			if importPath(impspec) != path {
				continue
			}

			// We found an import spec that imports path.
			// Delete it.
			delspecs = append(delspecs, impspec)
			deleted = true
			copy(gen.Specs[j:], gen.Specs[j+1:])
			gen.Specs = gen.Specs[:len(gen.Specs)-1]

			// If this was the last import spec in this decl,
			// delete the decl, too.
			if len(gen.Specs) == 0 {
				copy(f.Decls[i:], f.Decls[i+1:])
				f.Decls = f.Decls[:len(f.Decls)-1]
				i--
				break
			} else if len(gen.Specs) == 1 {
				// Exactly one import remains: schedule the deleted
				// spec's comments for removal, then merge lines so the
				// survivor sits directly under the import token.
				if impspec.Doc != nil {
					delcomments = append(delcomments, impspec.Doc)
				}
				if impspec.Comment != nil {
					delcomments = append(delcomments, impspec.Comment)
				}
				for _, cg := range f.Comments {
					// Found comment on the same line as the import spec.
					if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
						delcomments = append(delcomments, cg)
						break
					}
				}

				spec := gen.Specs[0].(*ast.ImportSpec)

				// Move the documentation right after the import decl.
				if spec.Doc != nil {
					for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
						fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
					}
				}
				for _, cg := range f.Comments {
					if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
						for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
							fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
						}
						break
					}
				}
			}
			if j > 0 {
				lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
				lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
				line := fset.Position(impspec.Path.ValuePos).Line

				// We deleted an entry but now there may be
				// a blank line-sized hole where the import was.
				if line-lastLine > 1 {
					// There was a blank line immediately preceding the deleted import,
					// so there's no need to close the hole.
					// Do nothing.
				} else {
					// There was no blank line. Close the hole.
					fset.File(gen.Rparen).MergeLine(line)
				}
			}
			// Re-examine index j, which now holds the next spec.
			j--
		}
	}

	// Delete imports from f.Imports.
	for i := 0; i < len(f.Imports); i++ {
		imp := f.Imports[i]
		for j, del := range delspecs {
			if imp == del {
				copy(f.Imports[i:], f.Imports[i+1:])
				f.Imports = f.Imports[:len(f.Imports)-1]
				copy(delspecs[j:], delspecs[j+1:])
				delspecs = delspecs[:len(delspecs)-1]
				i--
				break
			}
		}
	}

	// Delete comments from f.Comments.
	for i := 0; i < len(f.Comments); i++ {
		cg := f.Comments[i]
		for j, del := range delcomments {
			if cg == del {
				copy(f.Comments[i:], f.Comments[i+1:])
				f.Comments = f.Comments[:len(f.Comments)-1]
				copy(delcomments[j:], delcomments[j+1:])
				delcomments = delcomments[:len(delcomments)-1]
				i--
				break
			}
		}
	}

	// Every deleted spec must also have been present in f.Imports;
	// anything left over indicates an inconsistent ast.File.
	if len(delspecs) > 0 {
		panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
	}

	return
}
// RewriteImport rewrites any import of path oldPath to path newPath.
// It reports whether at least one import was rewritten.
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
	for _, imp := range f.Imports {
		if importPath(imp) != oldPath {
			continue
		}
		rewrote = true
		// Record the old End position explicitly; by default it would
		// be recomputed from the length of the new path value.
		imp.EndPos = imp.End()
		imp.Path.Value = strconv.Quote(newPath)
	}
	return rewrote
}
// UsesImport reports whether a given import is used.
func UsesImport(f *ast.File, path string) (used bool) {
	spec := importSpec(f, path)
	if spec == nil {
		return false
	}

	name := spec.Name.String()
	switch name {
	case "<nil>":
		// If the package name is not explicitly specified,
		// make an educated guess from the last path segment.
		// This is not guaranteed to be correct.
		name = path[strings.LastIndex(path, "/")+1:]
	case "_", ".":
		// Not sure if this import is used - err on the side of caution.
		return true
	}

	// Mark the import used if any selector expression references it as
	// a top-level name.
	ast.Walk(visitFn(func(n ast.Node) {
		if sel, ok := n.(*ast.SelectorExpr); ok && isTopName(sel.X, name) {
			used = true
		}
	}), f)

	return used
}
// visitFn adapts a plain function to the ast.Visitor interface.
type visitFn func(node ast.Node)

// Visit calls fn on node and returns fn itself, so traversal continues
// into node's children with the same function.
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
	fn(node)
	return fn
}
// imports reports whether f imports path.
func imports(f *ast.File, path string) bool {
	spec := importSpec(f, path)
	return spec != nil
}
// importSpec returns the import spec if f imports path,
// or nil otherwise.
func importSpec(f *ast.File, path string) *ast.ImportSpec {
	for _, spec := range f.Imports {
		if importPath(spec) != path {
			continue
		}
		return spec
	}
	return nil
}
// importPath returns the unquoted import path of s,
// or "" if the path is not properly quoted.
func importPath(s *ast.ImportSpec) string {
t, err := strconv.Unquote(s.Path.Value)
if err == nil {
return t
}
return ""
}
// declImports reports whether gen contains an import of path.
// It returns false for any declaration that is not an import decl.
func declImports(gen *ast.GenDecl, path string) bool {
	if gen.Tok != token.IMPORT {
		return false
	}
	for _, s := range gen.Specs {
		if importPath(s.(*ast.ImportSpec)) == path {
			return true
		}
	}
	return false
}
// matchLen returns the length of the longest path segment prefix shared by x and y,
// counted as the number of '/' separators inside the common byte prefix.
func matchLen(x, y string) int {
	segments := 0
	for i := 0; i < len(x) && i < len(y); i++ {
		if x[i] != y[i] {
			break
		}
		if x[i] == '/' {
			segments++
		}
	}
	return segments
}
// isTopName returns true if n is a top-level unresolved identifier with the given name.
func isTopName(n ast.Expr, name string) bool {
id, ok := n.(*ast.Ident)
return ok && id.Name == name && id.Obj == nil
}
// Imports returns the file imports grouped by paragraph.
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
var groups [][]*ast.ImportSpec
for _, decl := range f.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok || genDecl.Tok != token.IMPORT {
break
}
group := []*ast.ImportSpec{}
var lastLine int
for _, spec := range genDecl.Specs {
importSpec := spec.(*ast.ImportSpec)
pos := importSpec.Path.ValuePos
line := fset.Position(pos).Line
if lastLine > 0 && pos > 0 && line-lastLine > 1 {
groups = append(groups, group)
group = []*ast.ImportSpec{}
}
group = append(group, importSpec)
lastLine = line
}
groups = append(groups, group)
}
return groups
}

@ -0,0 +1,14 @@
package astutil
import "go/ast"
// Unparen returns e with any enclosing parentheses stripped.
func Unparen(e ast.Expr) ast.Expr {
for {
p, ok := e.(*ast.ParenExpr)
if !ok {
return e
}
e = p.X
}
}

@ -0,0 +1,172 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// A faster implementation of filepath.Walk.
//
// filepath.Walk's design necessarily calls os.Lstat on each file,
// even if the caller needs less info. And goimports only need to know
// the type of each file. The kernel interface provides the type in
// the Readdir call but the standard library ignored it.
// fastwalk_unix.go contains a fork of the syscall routines.
//
// See golang.org/issue/16399
package imports
import (
"errors"
"os"
"path/filepath"
"runtime"
)
// traverseLink is a sentinel error for fastWalk, similar to filepath.SkipDir.
// A walkFn returns it for a symlink to ask fastWalk to follow the link,
// assuming its target is a directory.
var traverseLink = errors.New("traverse symlink, assuming target is a directory")
// fastWalk walks the file tree rooted at root, calling walkFn for
// each file or directory in the tree, including root.
//
// If fastWalk returns filepath.SkipDir, the directory is skipped.
//
// Unlike filepath.Walk:
//   * file stat calls must be done by the user.
//     The only provided metadata is the file type, which does not include
//     any permission bits.
//   * multiple goroutines stat the filesystem concurrently. The provided
//     walkFn must be safe for concurrent use.
//   * fastWalk can follow symlinks if walkFn returns the traverseLink
//     sentinel error. It is the walkFn's responsibility to prevent
//     fastWalk from going into symlink cycles.
func fastWalk(root string, walkFn func(path string, typ os.FileMode) error) error {
	// TODO(bradfitz): make numWorkers configurable? We used a
	// minimum of 4 to give the kernel more info about multiple
	// things we want, in hopes its I/O scheduling can take
	// advantage of that. Hopefully most are in cache. Maybe 4 is
	// even too low of a minimum. Profile more.
	numWorkers := 4
	if n := runtime.NumCPU(); n > numWorkers {
		numWorkers = n
	}
	w := &walker{
		fn:       walkFn,
		enqueuec: make(chan walkItem, numWorkers), // buffered for performance
		workc:    make(chan walkItem, numWorkers), // buffered for performance
		donec:    make(chan struct{}),

		// buffered for correctness & not leaking goroutines:
		resc: make(chan error, numWorkers),
	}
	defer close(w.donec)
	// TODO(bradfitz): start the workers as needed? maybe not worth it.
	for i := 0; i < numWorkers; i++ {
		go w.doWork()
	}
	// The main loop below is the scheduler: it hands pending directories
	// (todo) to workers, collects newly discovered directories, and
	// tallies results until everything is done.
	todo := []walkItem{{dir: root}}
	out := 0 // number of items handed to workers and not yet answered
	for {
		workc := w.workc
		var workItem walkItem
		if len(todo) == 0 {
			// Nothing to hand out: a nil channel disables the send
			// case in the select below.
			workc = nil
		} else {
			workItem = todo[len(todo)-1]
		}
		select {
		case workc <- workItem:
			todo = todo[:len(todo)-1]
			out++
		case it := <-w.enqueuec:
			todo = append(todo, it)
		case err := <-w.resc:
			out--
			if err != nil {
				return err
			}
			if out == 0 && len(todo) == 0 {
				// It's safe to quit here, as long as the buffered
				// enqueue channel isn't also readable, which might
				// happen if the worker sends both another unit of
				// work and its result before the other select was
				// scheduled and both w.resc and w.enqueuec were
				// readable.
				select {
				case it := <-w.enqueuec:
					todo = append(todo, it)
				default:
					return nil
				}
			}
		}
	}
}
// doWork reads directories as instructed (via workc) and runs the
// user's callback function, reporting each result on resc. It exits
// when donec is closed.
func (w *walker) doWork() {
	for {
		select {
		case item := <-w.workc:
			w.resc <- w.walk(item.dir, !item.callbackDone)
		case <-w.donec:
			return
		}
	}
}
// walker holds the state shared between fastWalk's scheduler loop and
// its worker goroutines.
type walker struct {
	fn func(path string, typ os.FileMode) error // user callback

	donec    chan struct{} // closed on fastWalk's return
	workc    chan walkItem // to workers
	enqueuec chan walkItem // from workers
	resc     chan error    // from workers
}
// walkItem is one unit of work: a single directory to read.
type walkItem struct {
	dir          string
	callbackDone bool // callback already called; don't do it again
}
// enqueue hands a newly discovered directory back to the scheduler,
// giving up if the walk has already finished (donec closed).
func (w *walker) enqueue(it walkItem) {
	select {
	case <-w.donec:
	case w.enqueuec <- it:
	}
}
// onDirEnt handles one directory entry: directories are queued for a
// later walk (their callback runs then); everything else is passed to
// the user callback immediately. For symlinks, the sentinel errors
// traverseLink and filepath.SkipDir returned by the callback are
// interpreted rather than propagated.
func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
	joined := dirName + string(os.PathSeparator) + baseName
	if typ == os.ModeDir {
		w.enqueue(walkItem{dir: joined})
		return nil
	}

	err := w.fn(joined, typ)
	if typ != os.ModeSymlink {
		return err
	}
	switch err {
	case traverseLink:
		// Set callbackDone so we don't call it twice for both the
		// symlink-as-symlink and the symlink-as-directory later:
		w.enqueue(walkItem{dir: joined, callbackDone: true})
		return nil
	case filepath.SkipDir:
		// Permit SkipDir on symlinks too.
		return nil
	}
	return err
}
// walk invokes the user callback on root (unless it already ran, e.g.
// for a traversed symlink) and then reads the directory's entries.
func (w *walker) walk(root string, runUserCallback bool) error {
	if runUserCallback {
		switch err := w.fn(root, os.ModeDir); err {
		case nil:
			// keep going
		case filepath.SkipDir:
			return nil
		default:
			return err
		}
	}

	return readDir(root, w.onDirEnt)
}

@ -0,0 +1,13 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build freebsd openbsd netbsd
package imports
import "syscall"
// direntInode returns the inode (file serial) number of dirent.
// On the BSDs the field is named Fileno.
func direntInode(dirent *syscall.Dirent) uint64 {
	return uint64(dirent.Fileno)
}

@ -0,0 +1,13 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux,!appengine darwin
package imports
import "syscall"
// direntInode returns the inode number of dirent.
// On Linux and Darwin the field is named Ino.
func direntInode(dirent *syscall.Dirent) uint64 {
	return uint64(dirent.Ino)
}

@ -0,0 +1,29 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd
package imports
import (
"io/ioutil"
"os"
)
// readDir calls fn for each directory entry in dirName.
// It does not descend into directories or follow symlinks.
// If fn returns a non-nil error, readDir returns with that error
// immediately.
func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
fis, err := ioutil.ReadDir(dirName)
if err != nil {
return err
}
for _, fi := range fis {
if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
return err
}
}
return nil
}

@ -0,0 +1,122 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux,!appengine darwin freebsd openbsd netbsd
package imports
import (
"bytes"
"fmt"
"os"
"syscall"
"unsafe"
)
const blockSize = 8 << 10
// unknownFileMode is a sentinel (and bogus) os.FileMode
// value used to represent a syscall.DT_UNKNOWN Dirent.Type.
const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice
// readDir calls fn for each directory entry in dirName, reading the
// directory with raw ReadDirent syscalls into a reusable buffer rather
// than allocating an os.FileInfo per entry.
func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
	fd, err := syscall.Open(dirName, 0, 0)
	if err != nil {
		return err
	}
	defer syscall.Close(fd)

	// The buffer must be at least a block long.
	buf := make([]byte, blockSize) // stack-allocated; doesn't escape
	bufp := 0                      // starting read position in buf
	nbuf := 0                      // end valid data in buf
	for {
		// Refill the buffer when all parsed entries are consumed.
		if bufp >= nbuf {
			bufp = 0
			nbuf, err = syscall.ReadDirent(fd, buf)
			if err != nil {
				return os.NewSyscallError("readdirent", err)
			}
			if nbuf <= 0 {
				// End of directory.
				return nil
			}
		}
		consumed, name, typ := parseDirEnt(buf[bufp:nbuf])
		bufp += consumed
		if name == "" || name == "." || name == ".." {
			continue
		}
		// Fallback for filesystems (like old XFS) that don't
		// support Dirent.Type and have DT_UNKNOWN (0) there
		// instead.
		if typ == unknownFileMode {
			fi, err := os.Lstat(dirName + "/" + name)
			if err != nil {
				// It got deleted in the meantime.
				if os.IsNotExist(err) {
					continue
				}
				return err
			}
			typ = fi.Mode() & os.ModeType
		}
		if err := fn(dirName, name, typ); err != nil {
			return err
		}
	}
}
// parseDirEnt decodes the first syscall.Dirent record at the start of
// buf, returning the number of bytes it occupies, the entry name (empty
// when the record should be skipped), and the entry type mapped to
// os.FileMode type bits (unknownFileMode for DT_UNKNOWN).
func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
	// golang.org/issue/15653
	dirent := (*syscall.Dirent)(unsafe.Pointer(&buf[0]))
	if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v {
		panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v))
	}
	if len(buf) < int(dirent.Reclen) {
		panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen))
	}
	consumed = int(dirent.Reclen)
	if direntInode(dirent) == 0 { // File absent in directory.
		return
	}
	switch dirent.Type {
	case syscall.DT_REG:
		typ = 0
	case syscall.DT_DIR:
		typ = os.ModeDir
	case syscall.DT_LNK:
		typ = os.ModeSymlink
	case syscall.DT_BLK:
		typ = os.ModeDevice
	case syscall.DT_FIFO:
		typ = os.ModeNamedPipe
	case syscall.DT_SOCK:
		typ = os.ModeSocket
	case syscall.DT_UNKNOWN:
		typ = unknownFileMode
	default:
		// Skip weird things.
		// It's probably a DT_WHT (http://lwn.net/Articles/325369/)
		// or something. Revisit if/when this package is moved outside
		// of goimports. goimports only cares about regular files,
		// symlinks, and directories.
		return
	}

	// The name is NUL-terminated inside the fixed-size Name array.
	nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
	nameLen := bytes.IndexByte(nameBuf[:], 0)
	if nameLen < 0 {
		panic("failed to find terminating 0 byte in dirent")
	}

	// Special cases for common things:
	if nameLen == 1 && nameBuf[0] == '.' {
		name = "."
	} else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' {
		name = ".."
	} else {
		name = string(nameBuf[:nameLen])
	}
	return
}

@ -0,0 +1,978 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package imports
import (
"bufio"
"bytes"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"sort"
"strings"
"sync"
"golang.org/x/tools/go/ast/astutil"
)
// Debug controls verbose logging.
var Debug = false

var (
	inTests = false      // set true by fix_test.go; if false, no need to use testMu
	testMu  sync.RWMutex // guards globals reset by tests; used only if inTests
)

// If set, LocalPrefix instructs Process to sort import paths with the given
// prefix into another group after 3rd-party packages.
// Matching paths are assigned import group 3 (see importToGroup).
var LocalPrefix string
// importToGroup is a list of functions which map from an import path to
// a group number. The first function to return ok wins; paths matching
// none of them default to group 0 (see importGroup). Groups produced
// here: 3 = LocalPrefix, 2 = appengine, 1 = paths containing a dot
// (i.e. third-party domains).
var importToGroup = []func(importPath string) (num int, ok bool){
	func(importPath string) (num int, ok bool) {
		if LocalPrefix != "" && strings.HasPrefix(importPath, LocalPrefix) {
			return 3, true
		}
		return
	},
	func(importPath string) (num int, ok bool) {
		if strings.HasPrefix(importPath, "appengine") {
			return 2, true
		}
		return
	},
	func(importPath string) (num int, ok bool) {
		if strings.Contains(importPath, ".") {
			return 1, true
		}
		return
	},
}
// importGroup returns the sort group for importPath: the first match
// from importToGroup, or 0 if no classifier claims the path.
func importGroup(importPath string) int {
	for _, classify := range importToGroup {
		if group, ok := classify(importPath); ok {
			return group
		}
	}
	return 0
}
// packageInfo is a summary of features found in a package.
type packageInfo struct {
	// Globals records the names of package-level const/var declarations
	// found in sibling files (see dirPackageInfo); symbol => true.
	Globals map[string]bool // symbol => true
}
// dirPackageInfo gets information from other files in the package.
//
// It scans the sibling .go files of filename in srcDir (skipping
// _test.go files unless filename itself is a test file) and records the
// names of package-level const/var declarations, so that references to
// those globals are not mistaken for package references.
//
// A missing filename (e.g. input from stdin) yields an empty
// packageInfo; other stat errors are returned.
func dirPackageInfo(srcDir, filename string) (*packageInfo, error) {
	considerTests := strings.HasSuffix(filename, "_test.go")

	// Handle file from stdin
	if _, err := os.Stat(filename); err != nil {
		if os.IsNotExist(err) {
			return &packageInfo{}, nil
		}
		return nil, err
	}

	fileBase := filepath.Base(filename)
	packageFileInfos, err := ioutil.ReadDir(srcDir)
	if err != nil {
		return nil, err
	}

	info := &packageInfo{Globals: make(map[string]bool)}
	for _, fi := range packageFileInfos {
		if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
			continue
		}
		if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
			continue
		}

		fileSet := token.NewFileSet()
		// Unparseable siblings are silently skipped; this is best-effort.
		root, err := parser.ParseFile(fileSet, filepath.Join(srcDir, fi.Name()), nil, 0)
		if err != nil {
			continue
		}

		for _, decl := range root.Decls {
			genDecl, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}

			for _, spec := range genDecl.Specs {
				valueSpec, ok := spec.(*ast.ValueSpec)
				if !ok {
					continue
				}
				// Record every declared name, not just the first:
				// `var a, b = 1, 2` declares both a and b.
				for _, ident := range valueSpec.Names {
					info.Globals[ident.Name] = true
				}
			}
		}
	}
	return info, nil
}
// fixImports adjusts the imports of f so that all package references
// resolve: unused imports are deleted and missing ones are searched for
// (via findImport, concurrently) and added. It returns the list of
// import paths that were added.
func fixImports(fset *token.FileSet, f *ast.File, filename string) (added []string, err error) {
	// refs are a set of possible package references currently unsatisfied by imports.
	// first key: either base package (e.g. "fmt") or renamed package
	// second key: referenced package symbol (e.g. "Println")
	refs := make(map[string]map[string]bool)

	// decls are the current package imports. key is base package or renamed package.
	decls := make(map[string]*ast.ImportSpec)

	abs, err := filepath.Abs(filename)
	if err != nil {
		return nil, err
	}
	srcDir := filepath.Dir(abs)
	if Debug {
		log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
	}

	// Sibling-file information is loaded lazily, only when a selector
	// expression is actually encountered.
	var packageInfo *packageInfo
	var loadedPackageInfo bool

	// collect potential uses of packages.
	var visitor visitFn
	visitor = visitFn(func(node ast.Node) ast.Visitor {
		if node == nil {
			return visitor
		}
		switch v := node.(type) {
		case *ast.ImportSpec:
			if v.Name != nil {
				decls[v.Name.Name] = v
				break
			}
			ipath := strings.Trim(v.Path.Value, `"`)
			if ipath == "C" {
				break
			}
			local := importPathToName(ipath, srcDir)
			decls[local] = v
		case *ast.SelectorExpr:
			xident, ok := v.X.(*ast.Ident)
			if !ok {
				break
			}
			if xident.Obj != nil {
				// if the parser can resolve it, it's not a package ref
				break
			}
			pkgName := xident.Name
			if refs[pkgName] == nil {
				refs[pkgName] = make(map[string]bool)
			}
			if !loadedPackageInfo {
				loadedPackageInfo = true
				packageInfo, _ = dirPackageInfo(srcDir, filename)
			}
			// Only record the symbol if the name is neither an existing
			// import nor a package-level global from a sibling file.
			if decls[pkgName] == nil && (packageInfo == nil || !packageInfo.Globals[pkgName]) {
				refs[pkgName][v.Sel.Name] = true
			}
		}
		return visitor
	})
	ast.Walk(visitor, f)

	// Nil out any unused ImportSpecs, to be removed in following passes
	unusedImport := map[string]string{}
	for pkg, is := range decls {
		if refs[pkg] == nil && pkg != "_" && pkg != "." {
			name := ""
			if is.Name != nil {
				name = is.Name.Name
			}
			unusedImport[strings.Trim(is.Path.Value, `"`)] = name
		}
	}
	for ipath, name := range unusedImport {
		if ipath == "C" {
			// Don't remove cgo stuff.
			continue
		}
		astutil.DeleteNamedImport(fset, f, name, ipath)
	}

	for pkgName, symbols := range refs {
		if len(symbols) == 0 {
			// skip over packages already imported
			delete(refs, pkgName)
		}
	}

	// Search for imports matching potential package references.
	searches := 0
	type result struct {
		ipath string // import path (if err == nil)
		name  string // optional name to rename import as
		err   error
	}
	results := make(chan result)
	// One lookup goroutine per unresolved package name.
	// NOTE(review): if a lookup fails, the remaining goroutines block
	// forever sending to the unbuffered results channel; consider
	// buffering results with capacity len(refs).
	for pkgName, symbols := range refs {
		go func(pkgName string, symbols map[string]bool) {
			ipath, rename, err := findImport(pkgName, symbols, filename)
			r := result{ipath: ipath, err: err}
			if rename {
				r.name = pkgName
			}
			results <- r
		}(pkgName, symbols)
		searches++
	}
	for i := 0; i < searches; i++ {
		result := <-results
		if result.err != nil {
			return nil, result.err
		}
		if result.ipath != "" {
			if result.name != "" {
				astutil.AddNamedImport(fset, f, result.name, result.ipath)
			} else {
				astutil.AddImport(fset, f, result.ipath)
			}
			added = append(added, result.ipath)
		}
	}

	return added, nil
}
// importPathToName returns the package name for the given import path.
// It is declared as a variable (default: importPathToNameGoPath) so the
// implementation can be swapped out.
var importPathToName func(importPath, srcDir string) (packageName string) = importPathToNameGoPath
// importPathToNameBasic guesses the package name as the final element
// of the import path; srcDir is ignored.
func importPathToNameBasic(importPath, srcDir string) (packageName string) {
	packageName = path.Base(importPath)
	return
}
// importPathToNameGoPath finds out the actual package name, as declared in its .go files.
// If there's a problem, it falls back to using importPathToNameBasic.
func importPathToNameGoPath(importPath, srcDir string) (packageName string) {
	// Fast path for standard library without going to disk.
	if pkg, ok := stdImportPackage[importPath]; ok {
		return pkg
	}

	pkgName, err := importPathToNameGoPathParse(importPath, srcDir)
	if Debug {
		log.Printf("importPathToNameGoPathParse(%q, srcDir=%q) = %q, %v", importPath, srcDir, pkgName, err)
	}
	if err != nil {
		return importPathToNameBasic(importPath, srcDir)
	}
	return pkgName
}
// importPathToNameGoPathParse is a faster version of build.Import if
// the only thing desired is the package name. It uses build.FindOnly
// to find the directory and then only parses one file in the package,
// trusting that the files in the directory are consistent.
func importPathToNameGoPathParse(importPath, srcDir string) (packageName string, err error) {
	buildPkg, err := build.Import(importPath, srcDir, build.FindOnly)
	if err != nil {
		return "", err
	}
	d, err := os.Open(buildPkg.Dir)
	if err != nil {
		return "", err
	}
	names, err := d.Readdirnames(-1)
	d.Close()
	if err != nil {
		return "", err
	}
	sort.Strings(names) // to have predictable behavior
	var lastErr error
	var nfile int
	for _, name := range names {
		if !strings.HasSuffix(name, ".go") {
			continue
		}
		if strings.HasSuffix(name, "_test.go") {
			continue
		}
		nfile++
		fullFile := filepath.Join(buildPkg.Dir, name)

		fset := token.NewFileSet()
		// PackageClauseOnly: stop parsing after the package clause.
		f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
		if err != nil {
			lastErr = err
			continue
		}
		pkgName := f.Name.Name
		if pkgName == "documentation" {
			// Special case from go/build.ImportDir, not
			// handled by ctx.MatchFile.
			continue
		}
		if pkgName == "main" {
			// Also skip package main, assuming it's a +build ignore generator or example.
			// Since you can't import a package main anyway, there's no harm here.
			continue
		}
		return pkgName, nil
	}
	if lastErr != nil {
		return "", lastErr
	}
	return "", fmt.Errorf("no importable package found in %d Go files", nfile)
}
// stdImportPackage maps standard-library import paths to package names,
// letting importPathToNameGoPath resolve them without touching the disk.
var stdImportPackage = map[string]string{} // "net/http" => "http"

// init seeds stdImportPackage from the stdlib path list.
func init() {
	// Nothing in the standard library has a package name not
	// matching its import base name.
	for _, pkg := range stdlib {
		if _, ok := stdImportPackage[pkg]; !ok {
			stdImportPackage[pkg] = path.Base(pkg)
		}
	}
}
// Directory-scanning state.
var (
	// scanGoRootOnce guards calling scanGoRoot (for $GOROOT)
	scanGoRootOnce sync.Once
	// scanGoPathOnce guards calling scanGoPath (for $GOPATH)
	scanGoPathOnce sync.Once

	// populateIgnoreOnce guards calling populateIgnore
	populateIgnoreOnce sync.Once
	// ignoredDirs holds directories listed in .goimportsignore files;
	// skipDir consults it while scanning.
	ignoredDirs []os.FileInfo

	dirScanMu sync.RWMutex
	dirScan   map[string]*pkg // abs dir path => *pkg
)
// pkg describes one scanned package directory.
type pkg struct {
	dir             string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
	importPath      string // full pkg import path ("net/http", "foo/bar/vendor/a/b")
	importPathShort string // vendorless import path ("net/http", "a/b")
}
// byImportPathShortLength sorts by the short import path length, breaking ties on the
// import string itself.
type byImportPathShortLength []*pkg

func (s byImportPathShortLength) Len() int { return len(s) }
func (s byImportPathShortLength) Less(i, j int) bool {
	a, b := s[i].importPathShort, s[j].importPathShort
	if len(a) != len(b) {
		return len(a) < len(b)
	}
	return a < b
}
func (s byImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
// gate is a semaphore for limiting concurrency.
type gate chan struct{}

// enter acquires a slot, blocking while the gate is full.
func (g gate) enter() { g <- struct{}{} }

// leave releases a previously acquired slot.
func (g gate) leave() { <-g }
// visitedSymlinks records symlink targets already traversed, so that
// symlink loops are not followed twice (used by shouldTraverse).
var visitedSymlinks struct {
	sync.Mutex
	m map[string]struct{}
}
// populateIgnore reads the .goimportsignore file of every $GOPATH source
// directory; guarded by populateIgnoreOnce and populates ignoredDirs.
// The $GOROOT src tree is deliberately skipped.
func populateIgnore() {
	gorootSrc := filepath.Join(build.Default.GOROOT, "src")
	for _, srcDir := range build.Default.SrcDirs() {
		if srcDir == gorootSrc {
			continue
		}
		populateIgnoredDirs(srcDir)
	}
}
// populateIgnoredDirs reads an optional config file at <path>/.goimportsignore
// of relative directories to ignore when scanning for go files.
// The provided path is one of the $GOPATH entries with "src" appended.
func populateIgnoredDirs(path string) {
	file := filepath.Join(path, ".goimportsignore")
	slurp, err := ioutil.ReadFile(file)
	if Debug {
		if err != nil {
			log.Print(err)
		} else {
			log.Printf("Read %s", file)
		}
	}
	if err != nil {
		// Missing/unreadable ignore file: nothing to do.
		return
	}
	scanner := bufio.NewScanner(bytes.NewReader(slurp))
	for scanner.Scan() {
		entry := strings.TrimSpace(scanner.Text())
		// Blank lines and #-comments are skipped.
		if entry == "" || strings.HasPrefix(entry, "#") {
			continue
		}
		full := filepath.Join(path, entry)
		fi, err := os.Stat(full)
		if err != nil {
			if Debug {
				log.Printf("Error statting entry in .goimportsignore: %v", err)
			}
			continue
		}
		ignoredDirs = append(ignoredDirs, fi)
		if Debug {
			log.Printf("Directory added to ignore list: %s", full)
		}
	}
}
// skipDir reports whether fi is one of the directories the user listed
// in a .goimportsignore file, compared by identity via os.SameFile.
func skipDir(fi os.FileInfo) bool {
	for _, ignoredDir := range ignoredDirs {
		if os.SameFile(fi, ignoredDir) {
			return true
		}
	}
	return false
}
// shouldTraverse reports whether the symlink fi, found in dir, should
// be followed. It makes sure symlinks were never visited
// before to avoid symlink loops.
func shouldTraverse(dir string, fi os.FileInfo) bool {
	path := filepath.Join(dir, fi.Name())
	target, err := filepath.EvalSymlinks(path)
	if err != nil {
		if !os.IsNotExist(err) {
			fmt.Fprintln(os.Stderr, err)
		}
		return false
	}
	ts, err := os.Stat(target)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return false
	}
	if !ts.IsDir() {
		// Only directory symlinks are worth traversing.
		return false
	}
	if skipDir(ts) {
		return false
	}
	// Resolve dir itself so the same physical directory reached through
	// different symlink chains maps to a single visited key.
	realParent, err := filepath.EvalSymlinks(dir)
	if err != nil {
		// Use Fprintln like every other error path above so messages
		// don't run together on stderr (was Fprint).
		fmt.Fprintln(os.Stderr, err)
		return false
	}
	realPath := filepath.Join(realParent, fi.Name())
	visitedSymlinks.Lock()
	defer visitedSymlinks.Unlock()
	if visitedSymlinks.m == nil {
		visitedSymlinks.m = make(map[string]struct{})
	}
	if _, ok := visitedSymlinks.m[realPath]; ok {
		// Already traversed via some other route: avoid loops.
		return false
	}
	visitedSymlinks.m[realPath] = struct{}{}
	return true
}
// testHookScanDir is overridden by tests to observe which source
// directories get scanned.
var testHookScanDir = func(dir string) {}

// scanGoRootDone signals completion of the background $GOROOT scan.
var scanGoRootDone = make(chan struct{}) // closed when scanGoRoot is done
// scanGoRoot starts scanning the $GOROOT tree in the background;
// completion is signaled by closing scanGoRootDone.
func scanGoRoot() {
	go func() {
		scanGoDirs(true)
		close(scanGoRootDone)
	}()
}

// scanGoPath scans the $GOPATH trees synchronously.
func scanGoPath() { scanGoDirs(false) }
// scanGoDirs walks the $GOROOT (goRoot=true) or $GOPATH (goRoot=false)
// source trees and records, in the shared dirScan map, every directory
// that contains .go files. Callers serialize it via scanGoRootOnce and
// scanGoPathOnce.
func scanGoDirs(goRoot bool) {
	if Debug {
		which := "$GOROOT"
		if !goRoot {
			which = "$GOPATH"
		}
		log.Printf("scanning " + which)
		defer log.Printf("scanned " + which)
	}
	// Lazily allocate the shared result map.
	dirScanMu.Lock()
	if dirScan == nil {
		dirScan = make(map[string]*pkg)
	}
	dirScanMu.Unlock()
	for _, srcDir := range build.Default.SrcDirs() {
		isGoroot := srcDir == filepath.Join(build.Default.GOROOT, "src")
		// Only walk the tree kind (GOROOT vs GOPATH) we were asked for.
		if isGoroot != goRoot {
			continue
		}
		testHookScanDir(srcDir)
		walkFn := func(path string, typ os.FileMode) error {
			dir := filepath.Dir(path)
			if typ.IsRegular() {
				if dir == srcDir {
					// Doesn't make sense to have regular files
					// directly in your $GOPATH/src or $GOROOT/src.
					return nil
				}
				if !strings.HasSuffix(path, ".go") {
					return nil
				}
				// Record the containing directory once as a package.
				dirScanMu.Lock()
				if _, dup := dirScan[dir]; !dup {
					importpath := filepath.ToSlash(dir[len(srcDir)+len("/"):])
					dirScan[dir] = &pkg{
						importPath:      importpath,
						importPathShort: vendorlessImportPath(importpath),
						dir:             dir,
					}
				}
				dirScanMu.Unlock()
				return nil
			}
			if typ == os.ModeDir {
				base := filepath.Base(path)
				// Prune hidden, underscore, testdata and node_modules
				// directories entirely.
				if base == "" || base[0] == '.' || base[0] == '_' ||
					base == "testdata" || base == "node_modules" {
					return filepath.SkipDir
				}
				fi, err := os.Lstat(path)
				// Honor .goimportsignore entries.
				if err == nil && skipDir(fi) {
					if Debug {
						log.Printf("skipping directory %q under %s", fi.Name(), dir)
					}
					return filepath.SkipDir
				}
				return nil
			}
			if typ == os.ModeSymlink {
				base := filepath.Base(path)
				if strings.HasPrefix(base, ".#") {
					// Emacs noise.
					return nil
				}
				fi, err := os.Lstat(path)
				if err != nil {
					// Just ignore it.
					return nil
				}
				// Follow directory symlinks not already visited.
				if shouldTraverse(dir, fi) {
					return traverseLink
				}
			}
			return nil
		}
		if err := fastWalk(srcDir, walkFn); err != nil {
			log.Printf("goimports: scanning directory %v: %v", srcDir, err)
		}
	}
}
// vendorlessImportPath returns the devendorized version of the provided import path.
// e.g. "foo/bar/vendor/a/b" => "a/b"
func vendorlessImportPath(ipath string) string {
	// Devendorize for use in an import statement: keep everything after
	// the last "/vendor/" element, or strip a leading "vendor/".
	const sep = "/vendor/"
	if i := strings.LastIndex(ipath, sep); i >= 0 {
		return ipath[i+len(sep):]
	}
	return strings.TrimPrefix(ipath, "vendor/")
}
// loadExports returns the set of exported symbols in the package at dir.
// It returns nil on error or if the package name in dir does not match expectPackage.
// It is a variable (rather than a function) so it can be overridden,
// mirroring findImport below.
var loadExports func(expectPackage, dir string) map[string]bool = loadExportsGoPath
// loadExportsGoPath is the default implementation of loadExports: it
// parses the buildable (non-test) .go files in dir and collects the
// exported top-level identifiers. It returns nil if any file fails to
// parse or if the directory's package name is not expectPackage.
func loadExportsGoPath(expectPackage, dir string) map[string]bool {
	if Debug {
		log.Printf("loading exports in dir %s (seeking package %s)", dir, expectPackage)
	}
	exports := make(map[string]bool)
	ctx := build.Default
	// ReadDir is like ioutil.ReadDir, but only returns *.go files
	// and filters out _test.go files since they're not relevant
	// and only slow things down.
	ctx.ReadDir = func(dir string) (notTests []os.FileInfo, err error) {
		all, err := ioutil.ReadDir(dir)
		if err != nil {
			return nil, err
		}
		// Filter in place, reusing the backing array.
		notTests = all[:0]
		for _, fi := range all {
			name := fi.Name()
			if strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") {
				notTests = append(notTests, fi)
			}
		}
		return notTests, nil
	}
	files, err := ctx.ReadDir(dir)
	if err != nil {
		log.Print(err)
		return nil
	}
	fset := token.NewFileSet()
	for _, fi := range files {
		// MatchFile applies build constraints and GOOS/GOARCH file rules.
		match, err := ctx.MatchFile(dir, fi.Name())
		if err != nil || !match {
			continue
		}
		fullFile := filepath.Join(dir, fi.Name())
		f, err := parser.ParseFile(fset, fullFile, nil, 0)
		if err != nil {
			if Debug {
				log.Printf("Parsing %s: %v", fullFile, err)
			}
			// One unparsable file invalidates the whole directory.
			return nil
		}
		pkgName := f.Name.Name
		if pkgName == "documentation" {
			// Special case from go/build.ImportDir, not
			// handled by ctx.MatchFile.
			continue
		}
		if pkgName != expectPackage {
			if Debug {
				log.Printf("scan of dir %v is not expected package %v (actually %v)", dir, expectPackage, pkgName)
			}
			return nil
		}
		for name := range f.Scope.Objects {
			if ast.IsExported(name) {
				exports[name] = true
			}
		}
	}
	if Debug {
		exportList := make([]string, 0, len(exports))
		for k := range exports {
			exportList = append(exportList, k)
		}
		sort.Strings(exportList)
		log.Printf("loaded exports in dir %v (package %v): %v", dir, expectPackage, strings.Join(exportList, ", "))
	}
	return exports
}
// findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil)
//
// This is declared as a variable rather than a function so goimports
// can be easily extended by adding a file with an init function.
//
// The rename value tells goimports whether to use the package name as
// a local qualifier in an import. For example, if findImports("pkg",
// "X") returns ("foo/bar", rename=true), then goimports adds the
// import line:
//	import pkg "foo/bar"
// to satisfy uses of pkg.X in the file.
var findImport func(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) = findImportGoPath
// findImportGoPath is the normal implementation of findImport.
// (Some companies have their own internally.)
//
// It consults the stdlib table first, then scans $GOROOT/$GOPATH and
// loads exports concurrently for candidate packages, returning the
// first candidate (in sorted order) that provides all symbols.
func findImportGoPath(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) {
	if inTests {
		// Serialize with tests that mutate the scan state.
		testMu.RLock()
		defer testMu.RUnlock()
	}

	// Fast path for the standard library.
	// In the common case we hopefully never have to scan the GOPATH, which can
	// be slow with moving disks.
	if pkg, rename, ok := findImportStdlib(pkgName, symbols); ok {
		return pkg, rename, nil
	}
	if pkgName == "rand" && symbols["Read"] {
		// Special-case rand.Read.
		//
		// If findImportStdlib didn't find it above, don't go
		// searching for it, lest it find and pick math/rand
		// in GOROOT (new as of Go 1.6)
		//
		// crypto/rand is the safer choice.
		return "", false, nil
	}

	// TODO(sameer): look at the import lines for other Go files in the
	// local directory, since the user is likely to import the same packages
	// in the current Go file. Return rename=true when the other Go files
	// use a renamed package that's also used in the current file.

	// Read all the $GOPATH/src/.goimportsignore files before scanning directories.
	populateIgnoreOnce.Do(populateIgnore)

	// Start scanning the $GOROOT asynchronously, then run the
	// GOPATH scan synchronously if needed, and then wait for the
	// $GOROOT to finish.
	//
	// TODO(bradfitz): run each $GOPATH entry async. But nobody
	// really has more than one anyway, so low priority.
	scanGoRootOnce.Do(scanGoRoot) // async
	if !fileInDir(filename, build.Default.GOROOT) {
		scanGoPathOnce.Do(scanGoPath) // blocking
	}
	<-scanGoRootDone

	// Find candidate packages, looking only at their directory names first.
	var candidates []*pkg
	for _, pkg := range dirScan {
		if pkgIsCandidate(filename, pkgName, pkg) {
			candidates = append(candidates, pkg)
		}
	}

	// Sort the candidates by their import package length,
	// assuming that shorter package names are better than long
	// ones. Note that this sorts by the de-vendored name, so
	// there's no "penalty" for vendoring.
	sort.Sort(byImportPathShortLength(candidates))
	if Debug {
		for i, pkg := range candidates {
			log.Printf("%s candidate %d/%d: %v", pkgName, i+1, len(candidates), pkg.importPathShort)
		}
	}

	// Collect exports for packages with matching names.
	// Loads run concurrently (bounded by the semaphore below) but the
	// consumer loop at the bottom reads results in candidate order, so
	// the best-ranked match still wins.
	done := make(chan struct{}) // closed when we find the answer
	defer close(done)

	rescv := make([]chan *pkg, len(candidates))
	for i := range candidates {
		rescv[i] = make(chan *pkg)
	}
	const maxConcurrentPackageImport = 4
	loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)

	// Producer: dispatch one export-loading goroutine per candidate.
	go func() {
		for i, pkg := range candidates {
			select {
			case loadExportsSem <- struct{}{}:
				select {
				case <-done:
				default:
				}
			case <-done:
				return
			}
			pkg := pkg
			resc := rescv[i]
			go func() {
				if inTests {
					testMu.RLock()
					defer testMu.RUnlock()
				}
				defer func() { <-loadExportsSem }()
				exports := loadExports(pkgName, pkg.dir)

				// If it doesn't have the right
				// symbols, send nil to mean no match.
				for symbol := range symbols {
					if !exports[symbol] {
						pkg = nil
						break
					}
				}
				select {
				case resc <- pkg:
				case <-done:
				}
			}()
		}
	}()
	// Consumer: take results in priority (sorted) order.
	for _, resc := range rescv {
		pkg := <-resc
		if pkg == nil {
			continue
		}
		// If the package name in the source doesn't match the import path's base,
		// return true so the rewriter adds a name (import foo "github.com/bar/go-foo")
		needsRename := path.Base(pkg.importPath) != pkgName
		return pkg.importPathShort, needsRename, nil
	}
	return "", false, nil
}
// pkgIsCandidate reports whether pkg is a candidate for satisfying the
// finding which package pkgIdent in the file named by filename is trying
// to refer to.
//
// This check is purely lexical and is meant to be as fast as possible
// because it's run over all $GOPATH directories to filter out poor
// candidates in order to limit the CPU and I/O later parsing the
// exports in candidate packages.
//
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
	// Check "internal" and "vendor" visibility:
	if !canUse(filename, pkg.dir) {
		return false
	}

	// Speed optimization to minimize disk I/O:
	// the last two components on disk must contain the
	// package name somewhere.
	//
	// This permits mismatch naming like directory
	// "go-foo" being package "foo", or "pkg.v3" being "pkg",
	// or directory "google.golang.org/api/cloudbilling/v1"
	// being package "cloudbilling", but doesn't
	// permit a directory "foo" to be package
	// "bar", which is strongly discouraged
	// anyway. There's no reason goimports needs
	// to be slow just to accommodate that.
	lastTwo := lastTwoComponents(pkg.importPathShort)
	if strings.Contains(lastTwo, pkgIdent) {
		return true
	}
	// Retry with hyphens removed and upper-case folded, so "go-foo"
	// can still match ident "gofoo".
	if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
		lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
		if strings.Contains(lastTwo, pkgIdent) {
			return true
		}
	}
	return false
}
// hasHyphenOrUpperASCII reports whether s contains a '-' or an
// upper-case ASCII letter. It inspects raw bytes, not runes.
func hasHyphenOrUpperASCII(s string) bool {
	for _, c := range []byte(s) {
		if c == '-' || ('A' <= c && c <= 'Z') {
			return true
		}
	}
	return false
}
// lowerASCIIAndRemoveHyphen returns s with hyphens dropped and ASCII
// upper-case letters folded to lower case; other bytes pass through.
func lowerASCIIAndRemoveHyphen(s string) string {
	out := make([]byte, 0, len(s))
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c == '-' {
			continue
		}
		if 'A' <= c && c <= 'Z' {
			c += 'a' - 'A'
		}
		out = append(out, c)
	}
	return string(out)
}
// canUse reports whether the package in dir is usable from filename,
// respecting the Go "internal" and "vendor" visibility rules.
func canUse(filename, dir string) bool {
	// restricted reports whether a slash-separated path contains a
	// vendor or internal path element.
	restricted := func(p string) bool {
		return strings.Contains(p, "/vendor/") ||
			strings.Contains(p, "/internal/") ||
			strings.HasSuffix(p, "/internal")
	}

	// Fast path check, before any allocations. If it doesn't contain vendor
	// or internal, it's not tricky.
	// Note that this can false-negative on directories like "notinternal",
	// but we check it correctly below. This is just a fast path.
	if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
		return true
	}
	if !restricted(filepath.ToSlash(dir)) {
		return true
	}

	// Vendor or internal directory only visible from children of parent.
	// That means the path from the current directory to the target directory
	// can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
	// or bar/vendor or bar/internal.
	// After stripping all the leading ../, the only okay place to see vendor or internal
	// is at the very beginning of the path.
	absfile, err := filepath.Abs(filename)
	if err != nil {
		return false
	}
	absdir, err := filepath.Abs(dir)
	if err != nil {
		return false
	}
	rel, err := filepath.Rel(absfile, absdir)
	if err != nil {
		return false
	}
	relSlash := filepath.ToSlash(rel)
	if i := strings.LastIndex(relSlash, "../"); i >= 0 {
		relSlash = relSlash[i+len("../"):]
	}
	return !restricted(relSlash)
}
// lastTwoComponents returns at most the last two path components
// of v, using either / or \ as the path separator.
func lastTwoComponents(v string) string {
	seen := 0
	for i := len(v) - 1; i >= 0; i-- {
		c := v[i]
		if c != '/' && c != '\\' {
			continue
		}
		seen++
		if seen == 2 {
			// Keep the separator itself, matching Contains checks.
			return v[i:]
		}
	}
	// Fewer than two separators: return v unchanged.
	return v
}
// visitFn adapts a plain function to the ast.Visitor interface.
type visitFn func(node ast.Node) ast.Visitor

// Visit implements ast.Visitor by delegating to fn.
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
	return fn(node)
}
// findImportStdlib looks up the standard-library import path providing
// all of symbols under package name shortPkg. ok is false when no
// single stdlib package supplies every symbol.
func findImportStdlib(shortPkg string, symbols map[string]bool) (importPath string, rename, ok bool) {
	for symbol := range symbols {
		key := shortPkg + "." + symbol
		candidate := stdlib[key]
		switch {
		case candidate == "":
			if key == "rand.Read" {
				// Handled by the crypto/rand fallback below.
				continue
			}
			return "", false, false
		case importPath != "" && importPath != candidate:
			// Ambiguous. Symbols pointed to different things.
			return "", false, false
		}
		importPath = candidate
	}
	if importPath == "" && shortPkg == "rand" && symbols["Read"] {
		// crypto/rand is the safer choice for rand.Read.
		return "crypto/rand", false, true
	}
	return importPath, false, importPath != ""
}
// fileInDir reports whether the provided file path looks like
// it's in dir. (without hitting the filesystem)
func fileInDir(file, dir string) bool {
	rest := strings.TrimPrefix(file, dir)
	switch {
	case len(rest) == len(file):
		// Nothing was trimmed: dir is not a prefix of file.
		return false
	case rest == "":
		// file == dir exactly.
		return true
	default:
		// A real prefix only counts on a path-component boundary.
		return rest[0] == '/' || rest[0] == '\\'
	}
}

@ -0,0 +1,289 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run mkstdlib.go
// Package imports implements a Go pretty-printer (like package "go/format")
// that also adds or removes import statements as necessary.
package imports // import "golang.org/x/tools/imports"
import (
"bufio"
"bytes"
"fmt"
"go/ast"
"go/format"
"go/parser"
"go/printer"
"go/token"
"io"
"regexp"
"strconv"
"strings"
"golang.org/x/tools/go/ast/astutil"
)
// Options specifies options for processing files.
// A nil *Options passed to Process is equivalent to
// &Options{Comments: true, TabIndent: true, TabWidth: 8}.
type Options struct {
	Fragment   bool // Accept fragment of a source file (no package statement)
	AllErrors  bool // Report all errors (not just the first 10 on different lines)
	Comments   bool // Print comments (true if nil *Options provided)
	TabIndent  bool // Use tabs for indent (true if nil *Options provided)
	TabWidth   int  // Tab width (8 if nil *Options provided)
	FormatOnly bool // Disable the insertion and deletion of imports
}
// Process formats and adjusts imports for the provided file.
// If opt is nil the defaults are used.
//
// Note that filename's directory influences which imports can be chosen,
// so it is important that filename be accurate.
// To process data ``as if'' it were in filename, pass the data as a non-nil src.
func Process(filename string, src []byte, opt *Options) ([]byte, error) {
	if opt == nil {
		// Defaults mirror gofmt: keep comments, tabs, width 8.
		opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
	}

	fileSet := token.NewFileSet()
	file, adjust, err := parse(fileSet, filename, src, opt)
	if err != nil {
		return nil, err
	}

	if !opt.FormatOnly {
		// Add/remove import statements as needed.
		_, err = fixImports(fileSet, file, filename)
		if err != nil {
			return nil, err
		}
	}

	sortImports(fileSet, file)
	imps := astutil.Imports(fileSet, file)

	var spacesBefore []string // import paths we need spaces before
	for _, impSection := range imps {
		// Within each block of contiguous imports, see if any
		// import lines are in different group numbers. If so,
		// we'll need to put a space between them so it's
		// compatible with gofmt.
		lastGroup := -1
		for _, importSpec := range impSection {
			importPath, _ := strconv.Unquote(importSpec.Path.Value)
			groupNum := importGroup(importPath)
			if groupNum != lastGroup && lastGroup != -1 {
				spacesBefore = append(spacesBefore, importPath)
			}
			lastGroup = groupNum
		}
	}

	printerMode := printer.UseSpaces
	if opt.TabIndent {
		printerMode |= printer.TabIndent
	}
	printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}

	var buf bytes.Buffer
	err = printConfig.Fprint(&buf, fileSet, file)
	if err != nil {
		return nil, err
	}
	out := buf.Bytes()
	if adjust != nil {
		// Undo the fragment wrapping that parse added.
		out = adjust(src, out)
	}
	if len(spacesBefore) > 0 {
		out = addImportSpaces(bytes.NewReader(out), spacesBefore)
	}

	// Final gofmt pass to normalize whatever the textual fix-ups touched.
	out, err = format.Source(out)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// parse parses src, which was read from filename,
// as a Go source file or statement list.
// The returned adjust function, when non-nil, undoes the wrapping that
// was added to make a fragment parseable.
func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
	parserMode := parser.Mode(0)
	if opt.Comments {
		parserMode |= parser.ParseComments
	}
	if opt.AllErrors {
		parserMode |= parser.AllErrors
	}

	// Try as whole source file.
	file, err := parser.ParseFile(fset, filename, src, parserMode)
	if err == nil {
		return file, nil, nil
	}
	// If the error is that the source file didn't begin with a
	// package line and we accept fragmented input, fall through to
	// try as a source fragment. Stop and return on any other error.
	if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
		return nil, nil, err
	}

	// If this is a declaration list, make it a source file
	// by inserting a package clause.
	// Insert using a ;, not a newline, so that the line numbers
	// in psrc match the ones in src.
	psrc := append([]byte("package main;"), src...)
	file, err = parser.ParseFile(fset, filename, psrc, parserMode)
	if err == nil {
		// If a main function exists, we will assume this is a main
		// package and leave the file.
		if containsMainFunc(file) {
			return file, nil, nil
		}

		adjust := func(orig, src []byte) []byte {
			// Remove the package clause.
			// Gofmt has turned the ; into a \n.
			src = src[len("package main\n"):]
			return matchSpace(orig, src)
		}
		return file, adjust, nil
	}
	// If the error is that the source file didn't begin with a
	// declaration, fall through to try as a statement list.
	// Stop and return on any other error.
	if !strings.Contains(err.Error(), "expected declaration") {
		return nil, nil, err
	}

	// If this is a statement list, make it a source file
	// by inserting a package clause and turning the list
	// into a function body. This handles expressions too.
	// Insert using a ;, not a newline, so that the line numbers
	// in fsrc match the ones in src.
	fsrc := append(append([]byte("package p; func _() {"), src...), '}')
	file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
	if err == nil {
		adjust := func(orig, src []byte) []byte {
			// Remove the wrapping.
			// Gofmt has turned the ; into a \n\n.
			src = src[len("package p\n\nfunc _() {"):]
			src = src[:len(src)-len("}\n")]
			// Gofmt has also indented the function body one level.
			// Remove that indent.
			src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
			return matchSpace(orig, src)
		}
		return file, adjust, nil
	}

	// Failed, and out of options.
	return nil, nil, err
}
// containsMainFunc checks if a file contains a function declaration with the
// function signature 'func main()'
func containsMainFunc(file *ast.File) bool {
for _, decl := range file.Decls {
if f, ok := decl.(*ast.FuncDecl); ok {
if f.Name.Name != "main" {
continue
}
if len(f.Type.Params.List) != 0 {
continue
}
if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
continue
}
return true
}
}
return false
}
// cutSpace splits b into its leading whitespace, non-space middle, and
// trailing whitespace (spaces, tabs, and newlines only).
func cutSpace(b []byte) (before, middle, after []byte) {
	isSpace := func(c byte) bool { return c == ' ' || c == '\t' || c == '\n' }
	i := 0
	for i < len(b) && isSpace(b[i]) {
		i++
	}
	j := len(b)
	for j > 0 && isSpace(b[j-1]) {
		j--
	}
	if i > j {
		// b is entirely whitespace; report it all as trailing space.
		return nil, nil, b[j:]
	}
	return b[:i], b[i:j], b[j:]
}
// matchSpace reformats src to use the same space context as orig.
// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
// 2) matchSpace copies the indentation of the first non-blank line in orig
// to every non-blank line in src.
// 3) matchSpace copies the trailing space from orig and uses it in place
// of src's trailing space.
func matchSpace(orig []byte, src []byte) []byte {
	before, _, after := cutSpace(orig)
	// Split orig's leading space at the last newline: everything up to
	// it is blank lines (before); what follows is the indentation of the
	// first non-blank line (indent). LastIndex == -1 degrades cleanly.
	i := bytes.LastIndex(before, []byte{'\n'})
	before, indent := before[:i+1], before[i+1:]

	_, src, _ = cutSpace(src)

	var b bytes.Buffer
	b.Write(before)

	// Re-emit src line by line, prefixing each non-blank line with the
	// captured indentation.
	for len(src) > 0 {
		line := src
		if i := bytes.IndexByte(line, '\n'); i >= 0 {
			line, src = line[:i+1], line[i+1:]
		} else {
			src = nil
		}
		if len(line) > 0 && line[0] != '\n' { // not blank
			b.Write(indent)
		}
		b.Write(line)
	}
	b.Write(after)
	return b.Bytes()
}
var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
func addImportSpaces(r io.Reader, breaks []string) []byte {
var out bytes.Buffer
sc := bufio.NewScanner(r)
inImports := false
done := false
for sc.Scan() {
s := sc.Text()
if !inImports && !done && strings.HasPrefix(s, "import") {
inImports = true
}
if inImports && (strings.HasPrefix(s, "var") ||
strings.HasPrefix(s, "func") ||
strings.HasPrefix(s, "const") ||
strings.HasPrefix(s, "type")) {
done = true
inImports = false
}
if inImports && len(breaks) > 0 {
if m := impLine.FindStringSubmatch(s); m != nil {
if m[1] == breaks[0] {
out.WriteByte('\n')
breaks = breaks[1:]
}
}
}
fmt.Fprintln(&out, s)
}
return out.Bytes()
}

@ -0,0 +1,212 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Hacked up copy of go/ast/import.go
package imports
import (
"go/ast"
"go/token"
"sort"
"strconv"
)
// sortImports sorts runs of consecutive import lines in import blocks in f.
// It also removes duplicate imports when it is possible to do so without data loss.
func sortImports(fset *token.FileSet, f *ast.File) {
	for i, d := range f.Decls {
		d, ok := d.(*ast.GenDecl)
		if !ok || d.Tok != token.IMPORT {
			// Not an import declaration, so we're done.
			// Imports are always first.
			break
		}

		if len(d.Specs) == 0 {
			// Empty import block, remove it.
			// NOTE(review): mutates f.Decls while ranging over it and
			// keeps iterating — looks safe only when the empty block is
			// the last import decl; confirm before relying on it.
			f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
		}

		if !d.Lparen.IsValid() {
			// Not a block: sorted by default.
			continue
		}

		// Identify and sort runs of specs on successive lines.
		i := 0 // start of the current run (shadows the outer loop index)
		specs := d.Specs[:0]
		for j, s := range d.Specs {
			// A gap of more than one line ends the current run.
			if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
				// j begins a new run. End this one.
				specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
				i = j
			}
		}
		specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
		d.Specs = specs

		// Deduping can leave a blank line before the rparen; clean that up.
		if len(d.Specs) > 0 {
			lastSpec := d.Specs[len(d.Specs)-1]
			lastLine := fset.Position(lastSpec.Pos()).Line
			if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
				fset.File(d.Rparen).MergeLine(rParenLine - 1)
			}
		}
	}
}
func importPath(s ast.Spec) string {
t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
if err == nil {
return t
}
return ""
}
func importName(s ast.Spec) string {
n := s.(*ast.ImportSpec).Name
if n == nil {
return ""
}
return n.Name
}
func importComment(s ast.Spec) string {
c := s.(*ast.ImportSpec).Comment
if c == nil {
return ""
}
return c.Text()
}
// collapse indicates whether prev may be removed, leaving only next:
// the two specs must share path and name, and prev must carry no
// trailing comment that deletion would lose.
func collapse(prev, next ast.Spec) bool {
	same := importPath(prev) == importPath(next) && importName(prev) == importName(next)
	return same && prev.(*ast.ImportSpec).Comment == nil
}
// posSpan records the original source extent of an import spec so its
// positions can be reassigned after sorting.
type posSpan struct {
	Start token.Pos
	End   token.Pos
}
// sortSpecs sorts and dedups one run of import specs, moving any
// attached comments along with their spec and keeping the original
// position sequence so the printed layout is stable.
func sortSpecs(fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
	// Can't short-circuit here even if specs are already sorted,
	// since they might yet need deduplication.
	// A lone import, however, may be safely ignored.
	if len(specs) <= 1 {
		return specs
	}

	// Record positions for specs.
	pos := make([]posSpan, len(specs))
	for i, s := range specs {
		pos[i] = posSpan{s.Pos(), s.End()}
	}

	// Identify comments in this range.
	// Any comment from pos[0].Start to the final line counts.
	lastLine := fset.Position(pos[len(pos)-1].End).Line
	cstart := len(f.Comments)
	cend := len(f.Comments)
	for i, g := range f.Comments {
		if g.Pos() < pos[0].Start {
			continue
		}
		if i < cstart {
			cstart = i
		}
		if fset.Position(g.End()).Line > lastLine {
			cend = i
			break
		}
	}
	comments := f.Comments[cstart:cend]

	// Assign each comment to the import spec preceding it.
	importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
	specIndex := 0
	for _, g := range comments {
		for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
			specIndex++
		}
		s := specs[specIndex].(*ast.ImportSpec)
		importComment[s] = append(importComment[s], g)
	}

	// Sort the import specs by import path.
	// Remove duplicates, when possible without data loss.
	// Reassign the import paths to have the same position sequence.
	// Reassign each comment to abut the end of its spec.
	// Sort the comments by new position.
	sort.Sort(byImportSpec(specs))

	// Dedup. Thanks to our sorting, we can just consider
	// adjacent pairs of imports.
	deduped := specs[:0]
	for i, s := range specs {
		if i == len(specs)-1 || !collapse(s, specs[i+1]) {
			deduped = append(deduped, s)
		} else {
			// Dropped: merge its source line away.
			p := s.Pos()
			fset.File(p).MergeLine(fset.Position(p).Line)
		}
	}
	specs = deduped

	// Fix up comment positions so each comment abuts its spec.
	for i, s := range specs {
		s := s.(*ast.ImportSpec)
		if s.Name != nil {
			s.Name.NamePos = pos[i].Start
		}
		s.Path.ValuePos = pos[i].Start
		s.EndPos = pos[i].End
		for _, g := range importComment[s] {
			for _, c := range g.List {
				c.Slash = pos[i].End
			}
		}
	}

	sort.Sort(byCommentPos(comments))

	return specs
}
// byImportSpec orders import specs by group, then path, local name,
// and finally attached comment text.
type byImportSpec []ast.Spec // slice of *ast.ImportSpec

func (x byImportSpec) Len() int      { return len(x) }
func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] }

func (x byImportSpec) Less(i, j int) bool {
	ipath, jpath := importPath(x[i]), importPath(x[j])
	if igroup, jgroup := importGroup(ipath), importGroup(jpath); igroup != jgroup {
		return igroup < jgroup
	}
	if ipath != jpath {
		return ipath < jpath
	}
	if iname, jname := importName(x[i]), importName(x[j]); iname != jname {
		return iname < jname
	}
	return importComment(x[i]) < importComment(x[j])
}
// byCommentPos sorts comment groups by their source position.
type byCommentPos []*ast.CommentGroup

func (x byCommentPos) Len() int           { return len(x) }
func (x byCommentPos) Swap(i, j int)      { x[i], x[j] = x[j], x[i] }
func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }

File diff suppressed because it is too large Load Diff

32
vendor/vendor.json vendored

@ -0,0 +1,32 @@
{
"comment": "",
"ignore": "test ignore",
"package": [
{
"checksumSHA1": "WXQKmdzMj9t4XgMdrghVO8jmKRY=",
"origin": "github.com/fjl/gogen",
"path": "github.com/garslo/gogen",
"revision": "1d203ffc1f61870f13a589286cd21b9fdcaf7755",
"revisionTime": "2017-03-06T19:27:44Z"
},
{
"checksumSHA1": "0xjvz/KGzXhAFsnDRvzofE0xNzI=",
"path": "github.com/kylelemons/godebug/diff",
"revision": "a616ab194758ae0a11290d87ca46ee8c440117b0",
"revisionTime": "2017-02-24T01:00:52Z"
},
{
"checksumSHA1": "ikor+YKJu2eKwyFteBWhsb8IGy8=",
"path": "golang.org/x/tools/go/ast/astutil",
"revision": "6e7ee5a9ec598d425ca86d6aab6e76e21baf328c",
"revisionTime": "2017-02-09T19:54:08Z"
},
{
"checksumSHA1": "q7d529ueItc/mhXnJ1J9/FUCYu4=",
"path": "golang.org/x/tools/imports",
"revision": "6e7ee5a9ec598d425ca86d6aab6e76e21baf328c",
"revisionTime": "2017-02-09T19:54:08Z"
}
],
"rootPath": "github.com/fjl/gencodec"
}
Loading…
Cancel
Save