vendor golint

Sen Lu
2018-12-21 11:30:46 -08:00
parent 6a61a9af40
commit 242ce12de7
26 changed files with 5757 additions and 14 deletions

27
vendor/golang.org/x/tools/go/gcexportdata/BUILD generated vendored Normal file

@@ -0,0 +1,27 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"gcexportdata.go",
"importer.go",
],
importmap = "k8s.io/kubernetes/vendor/golang.org/x/tools/go/gcexportdata",
importpath = "golang.org/x/tools/go/gcexportdata",
visibility = ["//visibility:public"],
deps = ["//vendor/golang.org/x/tools/go/gcimporter15:go_default_library"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

100
vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go generated vendored Normal file

@@ -0,0 +1,100 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gcexportdata provides functions for locating, reading, and
// writing export data files containing type information produced by the
// gc compiler. This package supports go1.7 export data format and all
// later versions.
//
// This package replaces the deprecated golang.org/x/tools/go/gcimporter15
// package, which will be deleted in October 2017.
//
// Although it might seem convenient for this package to live alongside
// go/types in the standard library, this would cause version skew
// problems for developer tools that use it, since they must be able to
// consume the outputs of the gc compiler both before and after a Go
// update such as from Go 1.7 to Go 1.8. Because this package lives in
// golang.org/x/tools, sites can update their version of this repo some
// time before the Go 1.8 release and rebuild and redeploy their
// developer tools, which will then be able to consume both Go 1.7 and
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://github.com/golang/go/issues/15651.)
//
package gcexportdata
import (
"bufio"
"bytes"
"fmt"
"go/token"
"go/types"
"io"
"io/ioutil"
gcimporter "golang.org/x/tools/go/gcimporter15"
)
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
// using the workspace layout conventions of go/build.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
func Find(importPath string, srcDir string) (filename, path string) {
return gcimporter.FindPkg(importPath, srcDir)
}
// NewReader returns a reader for the export data section of an object
// (.o) or archive (.a) file read from r. The new reader may provide
// additional trailing data beyond the end of the export data.
func NewReader(r io.Reader) (io.Reader, error) {
buf := bufio.NewReader(r)
_, err := gcimporter.FindExportData(buf)
// If we ever switch to a zip-like archive format with the ToC
// at the end, we can return the correct portion of export data,
// but for now we must return the entire rest of the file.
return buf, err
}
// Read reads export data from in, decodes it, and returns type
// information for the package.
// The package name is specified by path.
// File position information is added to fset.
//
// Read may inspect and add to the imports map to ensure that references
// within the export data to other packages are consistent. The caller
// must ensure that imports[path] does not exist, or exists but is
// incomplete (see types.Package.Complete), and Read inserts the
// resulting package into this map entry.
//
// On return, the state of the reader is undefined.
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
data, err := ioutil.ReadAll(in)
if err != nil {
return nil, fmt.Errorf("reading export data for %q: %v", path, err)
}
if bytes.HasPrefix(data, []byte("!<arch>")) {
return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
}
// The App Engine Go runtime v1.6 uses the old export data format.
// TODO(adonovan): delete once v1.7 has been around for a while.
if bytes.HasPrefix(data, []byte("package ")) {
return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
}
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
return pkg, err
}
// Write writes encoded type information for the specified package to out.
// The FileSet provides file position information for named objects.
func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
_, err := out.Write(gcimporter.BExportData(fset, pkg))
return err
}

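A minimal usage sketch of the three functions documented above (Find, NewReader, Read). The import path "fmt" is only illustrative, and whether export data is actually located depends on the local GOROOT/GOPATH layout and installed packages.

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Locate the compiled package file for an import path.
	filename, path := gcexportdata.Find("fmt", "")
	if filename == "" {
		log.Fatalf("no export data found for %q", path)
	}

	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Skip the object/archive header so the reader is positioned
	// at the export data itself.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}

	// Decode type information into a fresh package map.
	fset := token.NewFileSet()
	imports := make(map[string]*types.Package)
	pkg, err := gcexportdata.Read(r, fset, imports, path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "complete:", pkg.Complete())
}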
73
vendor/golang.org/x/tools/go/gcexportdata/importer.go generated vendored Normal file

@@ -0,0 +1,73 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gcexportdata
import (
"fmt"
"go/token"
"go/types"
"os"
)
// NewImporter returns a new instance of the types.Importer interface
// that reads type information from export data files written by gc.
// The Importer also satisfies types.ImporterFrom.
//
// Export data files are located using "go build" workspace conventions
// and the build.Default context.
//
// Use this importer instead of go/importer.For("gc", ...) to avoid the
// version-skew problems described in the documentation of this package,
// or to control the FileSet or access the imports map populated during
// package loading.
//
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
return importer{fset, imports}
}
type importer struct {
fset *token.FileSet
imports map[string]*types.Package
}
func (imp importer) Import(importPath string) (*types.Package, error) {
return imp.ImportFrom(importPath, "", 0)
}
func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
filename, path := Find(importPath, srcDir)
if filename == "" {
if importPath == "unsafe" {
// Even for unsafe, call Find first in case
// the package was vendored.
return types.Unsafe, nil
}
return nil, fmt.Errorf("can't find import: %s", importPath)
}
if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
return pkg, nil // cache hit
}
// open file
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer func() {
f.Close()
if err != nil {
// add file name to error
err = fmt.Errorf("reading export data: %s: %v", filename, err)
}
}()
r, err := NewReader(f)
if err != nil {
return nil, err
}
return Read(r, imp.fset, imp.imports, path)
}

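A minimal sketch of NewImporter in use. "strings" is only illustrative; success assumes gc export data for that package is present in the workspace. The same value can also be plugged into a types.Config as its Importer for type-checking.

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	fset := token.NewFileSet()
	imports := make(map[string]*types.Package)

	// NewImporter satisfies types.ImporterFrom and records every
	// loaded package in the imports map.
	imp := gcexportdata.NewImporter(fset, imports)

	pkg, err := imp.Import("strings")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "loaded;", len(imports), "package(s) in the map")
}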
30
vendor/golang.org/x/tools/go/gcimporter15/BUILD generated vendored Normal file

@@ -0,0 +1,30 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"bexport.go",
"bimport.go",
"exportdata.go",
"gcimporter.go",
"isAlias18.go",
"isAlias19.go",
],
importmap = "k8s.io/kubernetes/vendor/golang.org/x/tools/go/gcimporter15",
importpath = "golang.org/x/tools/go/gcimporter15",
visibility = ["//visibility:public"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

828
vendor/golang.org/x/tools/go/gcimporter15/bexport.go generated vendored Normal file

@@ -0,0 +1,828 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Binary package export.
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
// see that file for specification of the format.
package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
"log"
"math"
"math/big"
"sort"
"strings"
)
// If debugFormat is set, each integer and string value is preceded by a marker
// and position information in the encoding. This mechanism permits an importer
// to recognize immediately when it is out of sync. The importer recognizes this
// mode automatically (i.e., it can import export data produced with debugging
// support even if debugFormat is not set at the time of import). This mode will
// lead to massively larger export data (by a factor of 2 to 3) and should only
// be enabled during development and debugging.
//
// NOTE: This flag is the first flag to enable if importing dies because of
// (suspected) format errors, and whenever a change is made to the format.
const debugFormat = false // default: false
// If trace is set, debugging output is printed to std out.
const trace = false // default: false
// Current export format version. Increase with each format change.
// 4: type name objects support type aliases, uses aliasTag
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
// 2: removed unused bool in ODCL export (compiler only)
// 1: header format change (more regular), export package for _ struct fields
// 0: Go1.7 encoding
const exportVersion = 4
// trackAllTypes enables cycle tracking for all types, not just named
// types. The existing compiler invariants assume that unnamed types
// that are not completely set up are not used, or else there are spurious
// errors.
// If disabled, only named types are tracked, possibly leading to slightly
// less efficient encoding in rare cases. It also prevents the export of
// some corner-case type declarations (but those are not handled correctly
// with the textual export format either).
// TODO(gri) enable and remove once issues caused by it are fixed
const trackAllTypes = false
type exporter struct {
fset *token.FileSet
out bytes.Buffer
// object -> index maps, indexed in order of serialization
strIndex map[string]int
pkgIndex map[*types.Package]int
typIndex map[types.Type]int
// position encoding
posInfoFormat bool
prevFile string
prevLine int
// debugging support
written int // bytes written
indent int // for trace
}
// BExportData returns binary export data for pkg.
// If no file set is provided, position info will be missing.
func BExportData(fset *token.FileSet, pkg *types.Package) []byte {
p := exporter{
fset: fset,
strIndex: map[string]int{"": 0}, // empty string is mapped to 0
pkgIndex: make(map[*types.Package]int),
typIndex: make(map[types.Type]int),
posInfoFormat: true, // TODO(gri) might become a flag, eventually
}
// write version info
// The version string must start with "version %d" where %d is the version
// number. Additional debugging information may follow after a blank; that
// text is ignored by the importer.
p.rawStringln(fmt.Sprintf("version %d", exportVersion))
var debug string
if debugFormat {
debug = "debug"
}
p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
p.bool(trackAllTypes)
p.bool(p.posInfoFormat)
// --- generic export data ---
// populate type map with predeclared "known" types
for index, typ := range predeclared {
p.typIndex[typ] = index
}
if len(p.typIndex) != len(predeclared) {
log.Fatalf("gcimporter: duplicate entries in type map?")
}
// write package data
p.pkg(pkg, true)
if trace {
p.tracef("\n")
}
// write objects
objcount := 0
scope := pkg.Scope()
for _, name := range scope.Names() {
if !ast.IsExported(name) {
continue
}
if trace {
p.tracef("\n")
}
p.obj(scope.Lookup(name))
objcount++
}
// indicate end of list
if trace {
p.tracef("\n")
}
p.tag(endTag)
// for self-verification only (redundant)
p.int(objcount)
if trace {
p.tracef("\n")
}
// --- end of export data ---
return p.out.Bytes()
}
func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
if pkg == nil {
log.Fatalf("gcimporter: unexpected nil pkg")
}
// if we saw the package before, write its index (>= 0)
if i, ok := p.pkgIndex[pkg]; ok {
p.index('P', i)
return
}
// otherwise, remember the package, write the package tag (< 0) and package data
if trace {
p.tracef("P%d = { ", len(p.pkgIndex))
defer p.tracef("} ")
}
p.pkgIndex[pkg] = len(p.pkgIndex)
p.tag(packageTag)
p.string(pkg.Name())
if emptypath {
p.string("")
} else {
p.string(pkg.Path())
}
}
func (p *exporter) obj(obj types.Object) {
switch obj := obj.(type) {
case *types.Const:
p.tag(constTag)
p.pos(obj)
p.qualifiedName(obj)
p.typ(obj.Type())
p.value(obj.Val())
case *types.TypeName:
if isAlias(obj) {
p.tag(aliasTag)
p.pos(obj)
p.qualifiedName(obj)
} else {
p.tag(typeTag)
}
p.typ(obj.Type())
case *types.Var:
p.tag(varTag)
p.pos(obj)
p.qualifiedName(obj)
p.typ(obj.Type())
case *types.Func:
p.tag(funcTag)
p.pos(obj)
p.qualifiedName(obj)
sig := obj.Type().(*types.Signature)
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
default:
log.Fatalf("gcimporter: unexpected object %v (%T)", obj, obj)
}
}
func (p *exporter) pos(obj types.Object) {
if !p.posInfoFormat {
return
}
file, line := p.fileLine(obj)
if file == p.prevFile {
// common case: write line delta
// delta == 0 means different file or no line change
delta := line - p.prevLine
p.int(delta)
if delta == 0 {
p.int(-1) // -1 means no file change
}
} else {
// different file
p.int(0)
// Encode filename as length of common prefix with previous
// filename, followed by (possibly empty) suffix. Filenames
// frequently share path prefixes, so this can save a lot
// of space and make export data size less dependent on file
// path length. The suffix is unlikely to be empty because
// file names tend to end in ".go".
n := commonPrefixLen(p.prevFile, file)
p.int(n) // n >= 0
p.string(file[n:]) // write suffix only
p.prevFile = file
p.int(line)
}
p.prevLine = line
}
func (p *exporter) fileLine(obj types.Object) (file string, line int) {
if p.fset != nil {
pos := p.fset.Position(obj.Pos())
file = pos.Filename
line = pos.Line
}
return
}
func commonPrefixLen(a, b string) int {
if len(a) > len(b) {
a, b = b, a
}
// len(a) <= len(b)
i := 0
for i < len(a) && a[i] == b[i] {
i++
}
return i
}
func (p *exporter) qualifiedName(obj types.Object) {
p.string(obj.Name())
p.pkg(obj.Pkg(), false)
}
func (p *exporter) typ(t types.Type) {
if t == nil {
log.Fatalf("gcimporter: nil type")
}
// Possible optimization: Anonymous pointer types *T where
// T is a named type are common. We could canonicalize all
// such types *T to a single type PT = *T. This would lead
// to at most one *T entry in typIndex, and all future *T's
// would be encoded as the respective index directly. Would
// save 1 byte (pointerTag) per *T and reduce the typIndex
// size (at the cost of a canonicalization map). We can do
// this later, without encoding format change.
// if we saw the type before, write its index (>= 0)
if i, ok := p.typIndex[t]; ok {
p.index('T', i)
return
}
// otherwise, remember the type, write the type tag (< 0) and type data
if trackAllTypes {
if trace {
p.tracef("T%d = {>\n", len(p.typIndex))
defer p.tracef("<\n} ")
}
p.typIndex[t] = len(p.typIndex)
}
switch t := t.(type) {
case *types.Named:
if !trackAllTypes {
// if we don't track all types, track named types now
p.typIndex[t] = len(p.typIndex)
}
p.tag(namedTag)
p.pos(t.Obj())
p.qualifiedName(t.Obj())
p.typ(t.Underlying())
if !types.IsInterface(t) {
p.assocMethods(t)
}
case *types.Array:
p.tag(arrayTag)
p.int64(t.Len())
p.typ(t.Elem())
case *types.Slice:
p.tag(sliceTag)
p.typ(t.Elem())
case *dddSlice:
p.tag(dddTag)
p.typ(t.elem)
case *types.Struct:
p.tag(structTag)
p.fieldList(t)
case *types.Pointer:
p.tag(pointerTag)
p.typ(t.Elem())
case *types.Signature:
p.tag(signatureTag)
p.paramList(t.Params(), t.Variadic())
p.paramList(t.Results(), false)
case *types.Interface:
p.tag(interfaceTag)
p.iface(t)
case *types.Map:
p.tag(mapTag)
p.typ(t.Key())
p.typ(t.Elem())
case *types.Chan:
p.tag(chanTag)
p.int(int(3 - t.Dir())) // hack
p.typ(t.Elem())
default:
log.Fatalf("gcimporter: unexpected type %T: %s", t, t)
}
}
func (p *exporter) assocMethods(named *types.Named) {
// Sort methods (for determinism).
var methods []*types.Func
for i := 0; i < named.NumMethods(); i++ {
methods = append(methods, named.Method(i))
}
sort.Sort(methodsByName(methods))
p.int(len(methods))
if trace && methods != nil {
p.tracef("associated methods {>\n")
}
for i, m := range methods {
if trace && i > 0 {
p.tracef("\n")
}
p.pos(m)
name := m.Name()
p.string(name)
if !exported(name) {
p.pkg(m.Pkg(), false)
}
sig := m.Type().(*types.Signature)
p.paramList(types.NewTuple(sig.Recv()), false)
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
p.int(0) // dummy value for go:nointerface pragma - ignored by importer
}
if trace && methods != nil {
p.tracef("<\n} ")
}
}
type methodsByName []*types.Func
func (x methodsByName) Len() int { return len(x) }
func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
func (p *exporter) fieldList(t *types.Struct) {
if trace && t.NumFields() > 0 {
p.tracef("fields {>\n")
defer p.tracef("<\n} ")
}
p.int(t.NumFields())
for i := 0; i < t.NumFields(); i++ {
if trace && i > 0 {
p.tracef("\n")
}
p.field(t.Field(i))
p.string(t.Tag(i))
}
}
func (p *exporter) field(f *types.Var) {
if !f.IsField() {
log.Fatalf("gcimporter: field expected")
}
p.pos(f)
p.fieldName(f)
p.typ(f.Type())
}
func (p *exporter) iface(t *types.Interface) {
// TODO(gri): enable importer to load embedded interfaces,
// then emit Embeddeds and ExplicitMethods separately here.
p.int(0)
n := t.NumMethods()
if trace && n > 0 {
p.tracef("methods {>\n")
defer p.tracef("<\n} ")
}
p.int(n)
for i := 0; i < n; i++ {
if trace && i > 0 {
p.tracef("\n")
}
p.method(t.Method(i))
}
}
func (p *exporter) method(m *types.Func) {
sig := m.Type().(*types.Signature)
if sig.Recv() == nil {
log.Fatalf("gcimporter: method expected")
}
p.pos(m)
p.string(m.Name())
if m.Name() != "_" && !ast.IsExported(m.Name()) {
p.pkg(m.Pkg(), false)
}
// interface method; no need to encode receiver.
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
}
func (p *exporter) fieldName(f *types.Var) {
name := f.Name()
if f.Anonymous() {
// anonymous field - we distinguish between 3 cases:
// 1) field name matches base type name and is exported
// 2) field name matches base type name and is not exported
// 3) field name doesn't match base type name (alias name)
bname := basetypeName(f.Type())
if name == bname {
if ast.IsExported(name) {
name = "" // 1) we don't need to know the field name or package
} else {
name = "?" // 2) use unexported name "?" to force package export
}
} else {
// 3) indicate alias and export name as is
// (this requires an extra "@" but this is a rare case)
p.string("@")
}
}
p.string(name)
if name != "" && !ast.IsExported(name) {
p.pkg(f.Pkg(), false)
}
}
func basetypeName(typ types.Type) string {
switch typ := deref(typ).(type) {
case *types.Basic:
return typ.Name()
case *types.Named:
return typ.Obj().Name()
default:
return "" // unnamed type
}
}
func (p *exporter) paramList(params *types.Tuple, variadic bool) {
// use negative length to indicate unnamed parameters
// (look at the first parameter only since either all
// names are present or all are absent)
n := params.Len()
if n > 0 && params.At(0).Name() == "" {
n = -n
}
p.int(n)
for i := 0; i < params.Len(); i++ {
q := params.At(i)
t := q.Type()
if variadic && i == params.Len()-1 {
t = &dddSlice{t.(*types.Slice).Elem()}
}
p.typ(t)
if n > 0 {
name := q.Name()
p.string(name)
if name != "_" {
p.pkg(q.Pkg(), false)
}
}
p.string("") // no compiler-specific info
}
}
func (p *exporter) value(x constant.Value) {
if trace {
p.tracef("= ")
}
switch x.Kind() {
case constant.Bool:
tag := falseTag
if constant.BoolVal(x) {
tag = trueTag
}
p.tag(tag)
case constant.Int:
if v, exact := constant.Int64Val(x); exact {
// common case: x fits into an int64 - use compact encoding
p.tag(int64Tag)
p.int64(v)
return
}
// uncommon case: large x - use float encoding
// (powers of 2 will be encoded efficiently with exponent)
p.tag(floatTag)
p.float(constant.ToFloat(x))
case constant.Float:
p.tag(floatTag)
p.float(x)
case constant.Complex:
p.tag(complexTag)
p.float(constant.Real(x))
p.float(constant.Imag(x))
case constant.String:
p.tag(stringTag)
p.string(constant.StringVal(x))
case constant.Unknown:
// package contains type errors
p.tag(unknownTag)
default:
log.Fatalf("gcimporter: unexpected value %v (%T)", x, x)
}
}
func (p *exporter) float(x constant.Value) {
if x.Kind() != constant.Float {
log.Fatalf("gcimporter: unexpected constant %v, want float", x)
}
// extract sign (there is no -0)
sign := constant.Sign(x)
if sign == 0 {
// x == 0
p.int(0)
return
}
// x != 0
var f big.Float
if v, exact := constant.Float64Val(x); exact {
// float64
f.SetFloat64(v)
} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
// TODO(gri): add big.Rat accessor to constant.Value.
r := valueToRat(num)
f.SetRat(r.Quo(r, valueToRat(denom)))
} else {
// Value too large to represent as a fraction => inaccessible.
// TODO(gri): add big.Float accessor to constant.Value.
f.SetFloat64(math.MaxFloat64) // FIXME
}
// extract exponent such that 0.5 <= m < 1.0
var m big.Float
exp := f.MantExp(&m)
// extract mantissa as *big.Int
// - set exponent large enough so mant satisfies mant.IsInt()
// - get *big.Int from mant
m.SetMantExp(&m, int(m.MinPrec()))
mant, acc := m.Int(nil)
if acc != big.Exact {
log.Fatalf("gcimporter: internal error")
}
p.int(sign)
p.int(exp)
p.string(string(mant.Bytes()))
}
func valueToRat(x constant.Value) *big.Rat {
// Convert little-endian to big-endian.
// I can't believe this is necessary.
bytes := constant.Bytes(x)
for i := 0; i < len(bytes)/2; i++ {
bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
}
return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
}
func (p *exporter) bool(b bool) bool {
if trace {
p.tracef("[")
defer p.tracef("= %v] ", b)
}
x := 0
if b {
x = 1
}
p.int(x)
return b
}
// ----------------------------------------------------------------------------
// Low-level encoders
func (p *exporter) index(marker byte, index int) {
if index < 0 {
log.Fatalf("gcimporter: invalid index < 0")
}
if debugFormat {
p.marker('t')
}
if trace {
p.tracef("%c%d ", marker, index)
}
p.rawInt64(int64(index))
}
func (p *exporter) tag(tag int) {
if tag >= 0 {
log.Fatalf("gcimporter: invalid tag >= 0")
}
if debugFormat {
p.marker('t')
}
if trace {
p.tracef("%s ", tagString[-tag])
}
p.rawInt64(int64(tag))
}
func (p *exporter) int(x int) {
p.int64(int64(x))
}
func (p *exporter) int64(x int64) {
if debugFormat {
p.marker('i')
}
if trace {
p.tracef("%d ", x)
}
p.rawInt64(x)
}
func (p *exporter) string(s string) {
if debugFormat {
p.marker('s')
}
if trace {
p.tracef("%q ", s)
}
// if we saw the string before, write its index (>= 0)
// (the empty string is mapped to 0)
if i, ok := p.strIndex[s]; ok {
p.rawInt64(int64(i))
return
}
// otherwise, remember string and write its negative length and bytes
p.strIndex[s] = len(p.strIndex)
p.rawInt64(-int64(len(s)))
for i := 0; i < len(s); i++ {
p.rawByte(s[i])
}
}
// marker emits a marker byte and position information which makes
// it easy for a reader to detect if it is "out of sync". Used for
// debugFormat format only.
func (p *exporter) marker(m byte) {
p.rawByte(m)
// Enable this for help tracking down the location
// of an incorrect marker when running in debugFormat.
if false && trace {
p.tracef("#%d ", p.written)
}
p.rawInt64(int64(p.written))
}
// rawInt64 should only be used by low-level encoders.
func (p *exporter) rawInt64(x int64) {
var tmp [binary.MaxVarintLen64]byte
n := binary.PutVarint(tmp[:], x)
for i := 0; i < n; i++ {
p.rawByte(tmp[i])
}
}
// rawStringln should only be used to emit the initial version string.
func (p *exporter) rawStringln(s string) {
for i := 0; i < len(s); i++ {
p.rawByte(s[i])
}
p.rawByte('\n')
}
// rawByte is the bottleneck interface to write to p.out.
// rawByte escapes b as follows (any encoding that hides
// '$' would do):
//
// '$' => '|' 'S'
// '|' => '|' '|'
//
// Necessary so other tools can find the end of the
// export data by searching for "$$".
// rawByte should only be used by low-level encoders.
func (p *exporter) rawByte(b byte) {
switch b {
case '$':
// write '$' as '|' 'S'
b = 'S'
fallthrough
case '|':
// write '|' as '|' '|'
p.out.WriteByte('|')
p.written++
}
p.out.WriteByte(b)
p.written++
}
// tracef is like fmt.Printf but it rewrites the format string
// to take care of indentation.
func (p *exporter) tracef(format string, args ...interface{}) {
if strings.ContainsAny(format, "<>\n") {
var buf bytes.Buffer
for i := 0; i < len(format); i++ {
// no need to deal with runes
ch := format[i]
switch ch {
case '>':
p.indent++
continue
case '<':
p.indent--
continue
}
buf.WriteByte(ch)
if ch == '\n' {
for j := p.indent; j > 0; j-- {
buf.WriteString(". ")
}
}
}
format = buf.String()
}
fmt.Printf(format, args...)
}
// Debugging support.
// (tagString is only used when tracing is enabled)
var tagString = [...]string{
// Packages
-packageTag: "package",
// Types
-namedTag: "named type",
-arrayTag: "array",
-sliceTag: "slice",
-dddTag: "ddd",
-structTag: "struct",
-pointerTag: "pointer",
-signatureTag: "signature",
-interfaceTag: "interface",
-mapTag: "map",
-chanTag: "chan",
// Values
-falseTag: "false",
-trueTag: "true",
-int64Tag: "int64",
-floatTag: "float",
-fractionTag: "fraction",
-complexTag: "complex",
-stringTag: "string",
-unknownTag: "unknown",
// Type aliases
-aliasTag: "alias",
}

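The rawByte comment above describes the '$'/'|' escaping that keeps the terminator "$$" out of the payload. A standalone sketch of that scheme (not part of the vendored API; the helper names are made up for illustration):

package main

import (
	"bytes"
	"fmt"
)

// escape applies the same rewriting as exporter.rawByte:
// '$' => '|' 'S' and '|' => '|' '|'.
func escape(data []byte) []byte {
	var out bytes.Buffer
	for _, b := range data {
		switch b {
		case '$':
			out.WriteString("|S")
		case '|':
			out.WriteString("||")
		default:
			out.WriteByte(b)
		}
	}
	return out.Bytes()
}

// unescape reverses escape, mirroring importer.rawByte.
// It assumes well-formed input.
func unescape(data []byte) []byte {
	var out bytes.Buffer
	for i := 0; i < len(data); i++ {
		b := data[i]
		if b == '|' && i+1 < len(data) {
			i++
			if data[i] == 'S' {
				b = '$'
			} // otherwise data[i] == '|' and b stays '|'
		}
		out.WriteByte(b)
	}
	return out.Bytes()
}

func main() {
	in := []byte("a$b|c")
	fmt.Printf("%q -> %q -> %q\n", in, escape(in), unescape(escape(in)))
}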
993
vendor/golang.org/x/tools/go/gcimporter15/bimport.go generated vendored Normal file

@@ -0,0 +1,993 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
package gcimporter
import (
"encoding/binary"
"fmt"
"go/constant"
"go/token"
"go/types"
"sort"
"strconv"
"strings"
"sync"
"unicode"
"unicode/utf8"
)
type importer struct {
imports map[string]*types.Package
data []byte
importpath string
buf []byte // for reading strings
version int // export format version
// object lists
strList []string // in order of appearance
pathList []string // in order of appearance
pkgList []*types.Package // in order of appearance
typList []types.Type // in order of appearance
interfaceList []*types.Interface // for delayed completion only
trackAllTypes bool
// position encoding
posInfoFormat bool
prevFile string
prevLine int
fset *token.FileSet
files map[string]*token.File
// debugging support
debugFormat bool
read int // bytes read
}
// BImportData imports a package from the serialized package data
// and returns the number of bytes consumed and a reference to the package.
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
// catch panics and return them as errors
defer func() {
if e := recover(); e != nil {
// The package (filename) causing the problem is added to this
// error by a wrapper in the caller (Import in gcimporter.go).
// Return a (possibly nil or incomplete) package unchanged (see #16088).
err = fmt.Errorf("cannot import, possibly version skew (%v) - reinstall package", e)
}
}()
p := importer{
imports: imports,
data: data,
importpath: path,
version: -1, // unknown version
strList: []string{""}, // empty string is mapped to 0
pathList: []string{""}, // empty string is mapped to 0
fset: fset,
files: make(map[string]*token.File),
}
// read version info
var versionstr string
if b := p.rawByte(); b == 'c' || b == 'd' {
// Go1.7 encoding; first byte encodes low-level
// encoding format (compact vs debug).
// For backward-compatibility only (avoid problems with
// old installed packages). Newly compiled packages use
// the extensible format string.
// TODO(gri) Remove this support eventually; after Go1.8.
if b == 'd' {
p.debugFormat = true
}
p.trackAllTypes = p.rawByte() == 'a'
p.posInfoFormat = p.int() != 0
versionstr = p.string()
if versionstr == "v1" {
p.version = 0
}
} else {
// Go1.8 extensible encoding
// read version string and extract version number (ignore anything after the version number)
versionstr = p.rawStringln(b)
if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
p.version = v
}
}
}
// read version specific flags - extend as necessary
switch p.version {
// case 6:
// ...
// fallthrough
case 5, 4, 3, 2, 1:
p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
p.trackAllTypes = p.int() != 0
p.posInfoFormat = p.int() != 0
case 0:
// Go1.7 encoding format - nothing to do here
default:
errorf("unknown export format version %d (%q)", p.version, versionstr)
}
// --- generic export data ---
// populate typList with predeclared "known" types
p.typList = append(p.typList, predeclared...)
// read package data
pkg = p.pkg()
// read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
objcount := 0
for {
tag := p.tagOrIndex()
if tag == endTag {
break
}
p.obj(tag)
objcount++
}
// self-verification
if count := p.int(); count != objcount {
errorf("got %d objects; want %d", objcount, count)
}
// ignore compiler-specific import data
// complete interfaces
// TODO(gri) re-investigate if we still need to do this in a delayed fashion
for _, typ := range p.interfaceList {
typ.Complete()
}
// record all referenced packages as imports
list := append(([]*types.Package)(nil), p.pkgList[1:]...)
sort.Sort(byPath(list))
pkg.SetImports(list)
// package was imported completely and without errors
pkg.MarkComplete()
return p.read, pkg, nil
}
func errorf(format string, args ...interface{}) {
panic(fmt.Sprintf(format, args...))
}
func (p *importer) pkg() *types.Package {
// if the package was seen before, i is its index (>= 0)
i := p.tagOrIndex()
if i >= 0 {
return p.pkgList[i]
}
// otherwise, i is the package tag (< 0)
if i != packageTag {
errorf("unexpected package tag %d version %d", i, p.version)
}
// read package data
name := p.string()
var path string
if p.version >= 5 {
path = p.path()
} else {
path = p.string()
}
// we should never see an empty package name
if name == "" {
errorf("empty package name in import")
}
// an empty path denotes the package we are currently importing;
// it must be the first package we see
if (path == "") != (len(p.pkgList) == 0) {
errorf("package path %q for pkg index %d", path, len(p.pkgList))
}
// if the package was imported before, use that one; otherwise create a new one
if path == "" {
path = p.importpath
}
pkg := p.imports[path]
if pkg == nil {
pkg = types.NewPackage(path, name)
p.imports[path] = pkg
} else if pkg.Name() != name {
errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
}
p.pkgList = append(p.pkgList, pkg)
return pkg
}
// objTag returns the tag value for each object kind.
func objTag(obj types.Object) int {
switch obj.(type) {
case *types.Const:
return constTag
case *types.TypeName:
return typeTag
case *types.Var:
return varTag
case *types.Func:
return funcTag
default:
errorf("unexpected object: %v (%T)", obj, obj) // panics
panic("unreachable")
}
}
func sameObj(a, b types.Object) bool {
// Because unnamed types are not canonicalized, we cannot simply compare types for
// (pointer) identity.
// Ideally we'd check equality of constant values as well, but this is good enough.
return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
}
func (p *importer) declare(obj types.Object) {
pkg := obj.Pkg()
if alt := pkg.Scope().Insert(obj); alt != nil {
// This can only trigger if we import a (non-type) object a second time.
// Excluding type aliases, this cannot happen because 1) we only import a package
// once; and b) we ignore compiler-specific export data which may contain
// functions whose inlined function bodies refer to other functions that
// were already imported.
// However, type aliases require reexporting the original type, so we need
// to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
// method importer.obj, switch case importing functions).
// TODO(gri) review/update this comment once the gc compiler handles type aliases.
if !sameObj(obj, alt) {
errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
}
}
}
func (p *importer) obj(tag int) {
switch tag {
case constTag:
pos := p.pos()
pkg, name := p.qualifiedName()
typ := p.typ(nil)
val := p.value()
p.declare(types.NewConst(pos, pkg, name, typ, val))
case aliasTag:
// TODO(gri) verify type alias hookup is correct
pos := p.pos()
pkg, name := p.qualifiedName()
typ := p.typ(nil)
p.declare(types.NewTypeName(pos, pkg, name, typ))
case typeTag:
p.typ(nil)
case varTag:
pos := p.pos()
pkg, name := p.qualifiedName()
typ := p.typ(nil)
p.declare(types.NewVar(pos, pkg, name, typ))
case funcTag:
pos := p.pos()
pkg, name := p.qualifiedName()
params, isddd := p.paramList()
result, _ := p.paramList()
sig := types.NewSignature(nil, params, result, isddd)
p.declare(types.NewFunc(pos, pkg, name, sig))
default:
errorf("unexpected object tag %d", tag)
}
}
const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
func (p *importer) pos() token.Pos {
if !p.posInfoFormat {
return token.NoPos
}
file := p.prevFile
line := p.prevLine
delta := p.int()
line += delta
if p.version >= 5 {
if delta == deltaNewFile {
if n := p.int(); n >= 0 {
// file changed
file = p.path()
line = n
}
}
} else {
if delta == 0 {
if n := p.int(); n >= 0 {
// file changed
file = p.prevFile[:n] + p.string()
line = p.int()
}
}
}
p.prevFile = file
p.prevLine = line
// Synthesize a token.Pos
// Since we don't know the set of needed file positions, we
// reserve maxlines positions per file.
const maxlines = 64 * 1024
f := p.files[file]
if f == nil {
f = p.fset.AddFile(file, -1, maxlines)
p.files[file] = f
// Allocate the fake linebreak indices on first use.
// TODO(adonovan): opt: save ~512KB using a more complex scheme?
fakeLinesOnce.Do(func() {
fakeLines = make([]int, maxlines)
for i := range fakeLines {
fakeLines[i] = i
}
})
f.SetLines(fakeLines)
}
if line > maxlines {
line = 1
}
// Treat the file as if it contained only newlines
// and column=1: use the line number as the offset.
return f.Pos(line - 1)
}
var (
fakeLines []int
fakeLinesOnce sync.Once
)
func (p *importer) qualifiedName() (pkg *types.Package, name string) {
name = p.string()
pkg = p.pkg()
return
}
func (p *importer) record(t types.Type) {
p.typList = append(p.typList, t)
}
// A dddSlice is a types.Type representing ...T parameters.
// It only appears for parameter types and does not escape
// the importer.
type dddSlice struct {
elem types.Type
}
func (t *dddSlice) Underlying() types.Type { return t }
func (t *dddSlice) String() string { return "..." + t.elem.String() }
// parent is the package which declared the type; parent == nil means
// the package currently imported. The parent package is needed for
// exported struct fields and interface methods which don't contain
// explicit package information in the export data.
func (p *importer) typ(parent *types.Package) types.Type {
// if the type was seen before, i is its index (>= 0)
i := p.tagOrIndex()
if i >= 0 {
return p.typList[i]
}
// otherwise, i is the type tag (< 0)
switch i {
case namedTag:
// read type object
pos := p.pos()
parent, name := p.qualifiedName()
scope := parent.Scope()
obj := scope.Lookup(name)
// if the object doesn't exist yet, create and insert it
if obj == nil {
obj = types.NewTypeName(pos, parent, name, nil)
scope.Insert(obj)
}
if _, ok := obj.(*types.TypeName); !ok {
errorf("pkg = %s, name = %s => %s", parent, name, obj)
}
// associate new named type with obj if it doesn't exist yet
t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
// but record the existing type, if any
t := obj.Type().(*types.Named)
p.record(t)
// read underlying type
t0.SetUnderlying(p.typ(parent))
// interfaces don't have associated methods
if types.IsInterface(t0) {
return t
}
// read associated methods
for i := p.int(); i > 0; i-- {
// TODO(gri) replace this with something closer to fieldName
pos := p.pos()
name := p.string()
if !exported(name) {
p.pkg()
}
recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
params, isddd := p.paramList()
result, _ := p.paramList()
p.int() // go:nointerface pragma - discarded
sig := types.NewSignature(recv.At(0), params, result, isddd)
t0.AddMethod(types.NewFunc(pos, parent, name, sig))
}
return t
case arrayTag:
t := new(types.Array)
if p.trackAllTypes {
p.record(t)
}
n := p.int64()
*t = *types.NewArray(p.typ(parent), n)
return t
case sliceTag:
t := new(types.Slice)
if p.trackAllTypes {
p.record(t)
}
*t = *types.NewSlice(p.typ(parent))
return t
case dddTag:
t := new(dddSlice)
if p.trackAllTypes {
p.record(t)
}
t.elem = p.typ(parent)
return t
case structTag:
t := new(types.Struct)
if p.trackAllTypes {
p.record(t)
}
*t = *types.NewStruct(p.fieldList(parent))
return t
case pointerTag:
t := new(types.Pointer)
if p.trackAllTypes {
p.record(t)
}
*t = *types.NewPointer(p.typ(parent))
return t
case signatureTag:
t := new(types.Signature)
if p.trackAllTypes {
p.record(t)
}
params, isddd := p.paramList()
result, _ := p.paramList()
*t = *types.NewSignature(nil, params, result, isddd)
return t
case interfaceTag:
// Create a dummy entry in the type list. This is safe because we
// cannot expect the interface type to appear in a cycle, as any
// such cycle must contain a named type which would have been
// first defined earlier.
n := len(p.typList)
if p.trackAllTypes {
p.record(nil)
}
var embeddeds []*types.Named
for n := p.int(); n > 0; n-- {
p.pos()
embeddeds = append(embeddeds, p.typ(parent).(*types.Named))
}
t := types.NewInterface(p.methodList(parent), embeddeds)
p.interfaceList = append(p.interfaceList, t)
if p.trackAllTypes {
p.typList[n] = t
}
return t
case mapTag:
t := new(types.Map)
if p.trackAllTypes {
p.record(t)
}
key := p.typ(parent)
val := p.typ(parent)
*t = *types.NewMap(key, val)
return t
case chanTag:
t := new(types.Chan)
if p.trackAllTypes {
p.record(t)
}
var dir types.ChanDir
// tag values must match the constants in cmd/compile/internal/gc/go.go
switch d := p.int(); d {
case 1 /* Crecv */ :
dir = types.RecvOnly
case 2 /* Csend */ :
dir = types.SendOnly
case 3 /* Cboth */ :
dir = types.SendRecv
default:
errorf("unexpected channel dir %d", d)
}
val := p.typ(parent)
*t = *types.NewChan(dir, val)
return t
default:
errorf("unexpected type tag %d", i) // panics
panic("unreachable")
}
}
func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
if n := p.int(); n > 0 {
fields = make([]*types.Var, n)
tags = make([]string, n)
for i := range fields {
fields[i], tags[i] = p.field(parent)
}
}
return
}
func (p *importer) field(parent *types.Package) (*types.Var, string) {
pos := p.pos()
pkg, name, alias := p.fieldName(parent)
typ := p.typ(parent)
tag := p.string()
anonymous := false
if name == "" {
// anonymous field - typ must be T or *T and T must be a type name
switch typ := deref(typ).(type) {
case *types.Basic: // basic types are named types
pkg = nil // objects defined in Universe scope have no package
name = typ.Name()
case *types.Named:
name = typ.Obj().Name()
default:
errorf("named base type expected")
}
anonymous = true
} else if alias {
// anonymous field: we have an explicit name because it's an alias
anonymous = true
}
return types.NewField(pos, pkg, name, typ, anonymous), tag
}
func (p *importer) methodList(parent *types.Package) (methods []*types.Func) {
if n := p.int(); n > 0 {
methods = make([]*types.Func, n)
for i := range methods {
methods[i] = p.method(parent)
}
}
return
}
func (p *importer) method(parent *types.Package) *types.Func {
pos := p.pos()
pkg, name, _ := p.fieldName(parent)
params, isddd := p.paramList()
result, _ := p.paramList()
sig := types.NewSignature(nil, params, result, isddd)
return types.NewFunc(pos, pkg, name, sig)
}
func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
name = p.string()
pkg = parent
if pkg == nil {
// use the imported package instead
pkg = p.pkgList[0]
}
if p.version == 0 && name == "_" {
// version 0 didn't export a package for _ fields
return
}
switch name {
case "":
// 1) field name matches base type name and is exported: nothing to do
case "?":
// 2) field name matches base type name and is not exported: need package
name = ""
pkg = p.pkg()
case "@":
// 3) field name doesn't match type name (alias)
name = p.string()
alias = true
fallthrough
default:
if !exported(name) {
pkg = p.pkg()
}
}
return
}
func (p *importer) paramList() (*types.Tuple, bool) {
n := p.int()
if n == 0 {
return nil, false
}
// negative length indicates unnamed parameters
named := true
if n < 0 {
n = -n
named = false
}
// n > 0
params := make([]*types.Var, n)
isddd := false
for i := range params {
params[i], isddd = p.param(named)
}
return types.NewTuple(params...), isddd
}
func (p *importer) param(named bool) (*types.Var, bool) {
t := p.typ(nil)
td, isddd := t.(*dddSlice)
if isddd {
t = types.NewSlice(td.elem)
}
var pkg *types.Package
var name string
if named {
name = p.string()
if name == "" {
errorf("expected named parameter")
}
if name != "_" {
pkg = p.pkg()
}
if i := strings.Index(name, "·"); i > 0 {
name = name[:i] // cut off gc-specific parameter numbering
}
}
// read and discard compiler-specific info
p.string()
return types.NewVar(token.NoPos, pkg, name, t), isddd
}
func exported(name string) bool {
ch, _ := utf8.DecodeRuneInString(name)
return unicode.IsUpper(ch)
}
func (p *importer) value() constant.Value {
switch tag := p.tagOrIndex(); tag {
case falseTag:
return constant.MakeBool(false)
case trueTag:
return constant.MakeBool(true)
case int64Tag:
return constant.MakeInt64(p.int64())
case floatTag:
return p.float()
case complexTag:
re := p.float()
im := p.float()
return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
case stringTag:
return constant.MakeString(p.string())
case unknownTag:
return constant.MakeUnknown()
default:
errorf("unexpected value tag %d", tag) // panics
panic("unreachable")
}
}
func (p *importer) float() constant.Value {
sign := p.int()
if sign == 0 {
return constant.MakeInt64(0)
}
exp := p.int()
mant := []byte(p.string()) // big endian
// remove leading 0's if any
for len(mant) > 0 && mant[0] == 0 {
mant = mant[1:]
}
// convert to little endian
// TODO(gri) go/constant should have a more direct conversion function
// (e.g., once it supports a big.Float based implementation)
for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
mant[i], mant[j] = mant[j], mant[i]
}
// adjust exponent (constant.MakeFromBytes creates an integer value,
// but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
exp -= len(mant) << 3
if len(mant) > 0 {
for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
exp++
}
}
x := constant.MakeFromBytes(mant)
switch {
case exp < 0:
d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
x = constant.BinaryOp(x, token.QUO, d)
case exp > 0:
x = constant.Shift(x, token.SHL, uint(exp))
}
if sign < 0 {
x = constant.UnaryOp(token.SUB, x, 0)
}
return x
}
// ----------------------------------------------------------------------------
// Low-level decoders
func (p *importer) tagOrIndex() int {
if p.debugFormat {
p.marker('t')
}
return int(p.rawInt64())
}
func (p *importer) int() int {
x := p.int64()
if int64(int(x)) != x {
errorf("exported integer too large")
}
return int(x)
}
func (p *importer) int64() int64 {
if p.debugFormat {
p.marker('i')
}
return p.rawInt64()
}
func (p *importer) path() string {
if p.debugFormat {
p.marker('p')
}
// if the path was seen before, i is its index (>= 0)
// (the empty string is at index 0)
i := p.rawInt64()
if i >= 0 {
return p.pathList[i]
}
// otherwise, i is the negative path length (< 0)
a := make([]string, -i)
for n := range a {
a[n] = p.string()
}
s := strings.Join(a, "/")
p.pathList = append(p.pathList, s)
return s
}
func (p *importer) string() string {
if p.debugFormat {
p.marker('s')
}
// if the string was seen before, i is its index (>= 0)
// (the empty string is at index 0)
i := p.rawInt64()
if i >= 0 {
return p.strList[i]
}
// otherwise, i is the negative string length (< 0)
if n := int(-i); n <= cap(p.buf) {
p.buf = p.buf[:n]
} else {
p.buf = make([]byte, n)
}
for i := range p.buf {
p.buf[i] = p.rawByte()
}
s := string(p.buf)
p.strList = append(p.strList, s)
return s
}
func (p *importer) marker(want byte) {
if got := p.rawByte(); got != want {
errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
}
pos := p.read
if n := int(p.rawInt64()); n != pos {
errorf("incorrect position: got %d; want %d", n, pos)
}
}
// rawInt64 should only be used by low-level decoders.
func (p *importer) rawInt64() int64 {
i, err := binary.ReadVarint(p)
if err != nil {
errorf("read error: %v", err)
}
return i
}
// rawStringln should only be used to read the initial version string.
func (p *importer) rawStringln(b byte) string {
p.buf = p.buf[:0]
for b != '\n' {
p.buf = append(p.buf, b)
b = p.rawByte()
}
return string(p.buf)
}
// needed for binary.ReadVarint in rawInt64
func (p *importer) ReadByte() (byte, error) {
return p.rawByte(), nil
}
// rawByte is the bottleneck interface for reading p.data.
// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
// rawByte should only be used by low-level decoders.
func (p *importer) rawByte() byte {
b := p.data[0]
r := 1
if b == '|' {
b = p.data[1]
r = 2
switch b {
case 'S':
b = '$'
case '|':
// nothing to do
default:
errorf("unexpected escape sequence in export data")
}
}
p.data = p.data[r:]
p.read += r
return b
}
// ----------------------------------------------------------------------------
// Export format
// Tags. Must be < 0.
const (
// Objects
packageTag = -(iota + 1)
constTag
typeTag
varTag
funcTag
endTag
// Types
namedTag
arrayTag
sliceTag
dddTag
structTag
pointerTag
signatureTag
interfaceTag
mapTag
chanTag
// Values
falseTag
trueTag
int64Tag
floatTag
fractionTag // not used by gc
complexTag
stringTag
nilTag // only used by gc (appears in exported inlined function bodies)
unknownTag // not used by gc (only appears in packages with errors)
// Type aliases
aliasTag
)
var predeclared = []types.Type{
// basic types
types.Typ[types.Bool],
types.Typ[types.Int],
types.Typ[types.Int8],
types.Typ[types.Int16],
types.Typ[types.Int32],
types.Typ[types.Int64],
types.Typ[types.Uint],
types.Typ[types.Uint8],
types.Typ[types.Uint16],
types.Typ[types.Uint32],
types.Typ[types.Uint64],
types.Typ[types.Uintptr],
types.Typ[types.Float32],
types.Typ[types.Float64],
types.Typ[types.Complex64],
types.Typ[types.Complex128],
types.Typ[types.String],
// basic type aliases
types.Universe.Lookup("byte").Type(),
types.Universe.Lookup("rune").Type(),
// error
types.Universe.Lookup("error").Type(),
// untyped types
types.Typ[types.UntypedBool],
types.Typ[types.UntypedInt],
types.Typ[types.UntypedRune],
types.Typ[types.UntypedFloat],
types.Typ[types.UntypedComplex],
types.Typ[types.UntypedString],
types.Typ[types.UntypedNil],
// package unsafe
types.Typ[types.UnsafePointer],
// invalid type
types.Typ[types.Invalid], // only appears in packages with errors
// used internally by gc; never used by this package or in .a files
anyType{},
}
type anyType struct{}
func (t anyType) Underlying() types.Type { return t }
func (t anyType) String() string { return "any" }

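With both the encoder (BExportData) and the decoder (BImportData) in view, a minimal round-trip sketch: build a synthetic package, export it, and re-import it into a fresh package map. The package path "example.com/p", the file name "p.go", and the constant "Answer" are made up for illustration.

package main

import (
	"fmt"
	"go/constant"
	"go/token"
	"go/types"
	"log"

	gcimporter "golang.org/x/tools/go/gcimporter15"
)

func main() {
	// Give the constant a valid (fake) position so the exporter can
	// encode file/line information.
	fset := token.NewFileSet()
	file := fset.AddFile("p.go", -1, 100)

	pkg := types.NewPackage("example.com/p", "p")
	pkg.Scope().Insert(types.NewConst(file.Pos(0), pkg, "Answer",
		types.Typ[types.Int], constant.MakeInt64(42)))
	pkg.MarkComplete()

	// Encode the package ...
	data := gcimporter.BExportData(fset, pkg)

	// ... and decode it into a fresh FileSet and package map.
	imports := make(map[string]*types.Package)
	_, pkg2, err := gcimporter.BImportData(token.NewFileSet(), imports, data, "example.com/p")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg2.Scope().Lookup("Answer")) // const example.com/p.Answer int
}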
93
vendor/golang.org/x/tools/go/gcimporter15/exportdata.go generated vendored Normal file

@@ -0,0 +1,93 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
// This file implements FindExportData.
package gcimporter
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
)
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
if err != nil {
return
}
// leave for debugging
if false {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
size, err = strconv.Atoi(s)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = fmt.Errorf("invalid archive header")
return
}
name = strings.TrimSpace(string(hdr[:16]))
return
}
// FindExportData positions the reader r at the beginning of the
// export data section of an underlying GC-created object/archive
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function. The hdr result
// is the string before the export data, either "$$" or "$$B".
//
func FindExportData(r *bufio.Reader) (hdr string, err error) {
// Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n')
if err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
if string(line) == "!<arch>\n" {
// Archive file. Scan to __.PKGDEF.
var name string
if name, _, err = readGopackHeader(r); err != nil {
return
}
// First entry should be __.PKGDEF.
if name != "__.PKGDEF" {
err = fmt.Errorf("go archive is missing __.PKGDEF")
return
}
// Read first line of __.PKGDEF data, so that line
// is once again the first line of the input.
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
}
// Now at __.PKGDEF in archive or still at beginning of file.
// Either way, line should begin with "go object ".
if !strings.HasPrefix(string(line), "go object ") {
err = fmt.Errorf("not a Go object file")
return
}
// Skip over object header to export data.
// Begins after first line starting with $$.
for line[0] != '$' {
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
}
hdr = string(line)
return
}

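A short sketch tying FindPkg (defined in gcimporter.go, whose diff is suppressed below) to FindExportData: locate the compiled package file for an import path and position a reader at its export data. The "fmt" path is illustrative; whether a compiled file exists depends on the local toolchain and installed packages.

package main

import (
	"bufio"
	"fmt"
	"log"
	"os"

	gcimporter "golang.org/x/tools/go/gcimporter15"
)

func main() {
	filename, _ := gcimporter.FindPkg("fmt", "")
	if filename == "" {
		log.Fatal("no compiled package file found for fmt")
	}

	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// After FindExportData returns, the reader is positioned at the
	// start of the export data; hdr is "$$" or "$$B".
	hdr, err := gcimporter.FindExportData(bufio.NewReader(f))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("export data header: %q\n", hdr)
}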
1041
vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go generated vendored Normal file

File diff suppressed because it is too large

13
vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go generated vendored Normal file

@@ -0,0 +1,13 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.9
package gcimporter
import "go/types"
func isAlias(obj *types.TypeName) bool {
return false // there are no type aliases before Go 1.9
}

13
vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go generated vendored Normal file

@@ -0,0 +1,13 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.9
package gcimporter
import "go/types"
func isAlias(obj *types.TypeName) bool {
return obj.IsAlias()
}