Run godep-save.sh and update-staging-godeps.sh

pull/6/head
Allen Petersen 2018-01-02 21:55:25 -08:00
parent f0b1dfd33f
commit 6d19de343f
22 changed files with 12859 additions and 95 deletions

Godeps/Godeps.json generated

@@ -442,47 +442,47 @@
},
{
"ImportPath": "github.com/containerd/containerd/api/services/containers/v1",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/api/services/tasks/v1",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/api/services/version/v1",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/api/types",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/api/types/task",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/containers",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/dialer",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/errdefs",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
"ImportPath": "github.com/containerd/containerd/namespaces",
"Comment": "v1.0.0-beta.2-159-g27d450a0",
"Comment": "v1.0.0-beta.2-159-g27d450a",
"Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
},
{
@@ -907,152 +907,152 @@
},
{
"ImportPath": "github.com/docker/distribution/digestset",
"Comment": "v2.6.0-rc.1-209-gedc3ab29",
"Comment": "v2.6.0-rc.1-209-gedc3ab2",
"Rev": "edc3ab29cdff8694dd6feb85cfeb4b5f1b38ed9c"
},
{
"ImportPath": "github.com/docker/distribution/reference",
"Comment": "v2.6.0-rc.1-209-gedc3ab29",
"Comment": "v2.6.0-rc.1-209-gedc3ab2",
"Rev": "edc3ab29cdff8694dd6feb85cfeb4b5f1b38ed9c"
},
{
"ImportPath": "github.com/docker/docker/api",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/blkiodev",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/container",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/events",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/filters",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/image",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/mount",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/network",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/registry",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/strslice",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/swarm",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/swarm/runtime",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/time",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/versions",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/api/types/volume",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/client",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/ioutils",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/jsonlog",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/jsonmessage",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/longpath",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/mount",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/stdcopy",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/symlink",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/system",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/term",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/term/windows",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
"ImportPath": "github.com/docker/docker/pkg/tlsconfig",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616fb1",
"Comment": "docs-v1.12.0-rc4-2016-07-15-7401-g4f3616f",
"Rev": "4f3616fb1c112e206b88cb7a9922bf49067a7756"
},
{
@@ -1077,7 +1077,7 @@
},
{
"ImportPath": "github.com/docker/libnetwork/ipvs",
"Comment": "v0.8.0-dev.2-910-gba46b928",
"Comment": "v0.8.0-dev.2-910-gba46b92",
"Rev": "ba46b928444931e6865d8618dc03622cac79aa6f"
},
{
@@ -1204,132 +1204,132 @@
},
{
"ImportPath": "github.com/gogo/protobuf/gogoproto",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/compare",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/defaultcheck",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/description",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/embedcheck",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/enumstringer",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/equal",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/face",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/gostring",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/marshalto",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/oneofcheck",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/populate",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/size",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/stringer",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/testgen",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/union",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/plugin/unmarshal",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/proto",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/protoc-gen-gogo/descriptor",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/protoc-gen-gogo/generator",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/protoc-gen-gogo/grpc",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/protoc-gen-gogo/plugin",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/sortkeys",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/types",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/vanity",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
"ImportPath": "github.com/gogo/protobuf/vanity/command",
"Comment": "v0.4-3-gc0656edd",
"Comment": "v0.4-3-gc0656ed",
"Rev": "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
},
{
@@ -2251,77 +2251,77 @@
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/apparmor",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups/fs",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups/rootless",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups/systemd",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/configs",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/configs/validate",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/criurpc",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/keys",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/seccomp",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/stacktrace",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/system",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/user",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
"ImportPath": "github.com/opencontainers/runc/libcontainer/utils",
"Comment": "v1.0.0-rc4-50-g4d6e6720",
"Comment": "v1.0.0-rc4-50-g4d6e672",
"Rev": "4d6e6720a7c885c37b4cb083c0d372dda3425120"
},
{
@@ -3142,4 +3142,4 @@
"Rev": "db5cfe13f5cc80a4990d98e2e1b0707a4d1a5394"
}
]
}
}
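
Apart from the trailing closing-brace hunk at the end of the file, every change in this Godeps.json touches only the Comment field: re-running godep-save.sh regenerated the git describe strings, and the abbreviated hash suffix shrank from eight hex digits to seven (for example g27d450a0 becomes g27d450a), while every Rev pin stays the same. As a hedged illustration of the entry shape this diff edits (only the three fields visible above are modeled; the Dependency type name is made up, not part of godep), one record can be decoded like this:

package main

import (
	"encoding/json"
	"fmt"
)

// Dependency mirrors the per-package entries visible in this Godeps.json diff.
// Field names match the JSON keys; the struct itself is only illustrative.
type Dependency struct {
	ImportPath string `json:"ImportPath"`
	Comment    string `json:"Comment,omitempty"` // git describe output, e.g. "v1.0.0-beta.2-159-g27d450a"
	Rev        string `json:"Rev"`               // full commit hash the package is pinned to
}

func main() {
	// One entry taken verbatim from the updated file.
	raw := `{
	  "ImportPath": "github.com/containerd/containerd/containers",
	  "Comment": "v1.0.0-beta.2-159-g27d450a",
	  "Rev": "27d450a01bb533d7ebc5701eb52792565396b084"
	}`

	var d Dependency
	if err := json.Unmarshal([]byte(raw), &d); err != nil {
		panic(err)
	}
	fmt.Println(d.ImportPath, "pinned to", d.Rev)
}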

Godeps/LICENSES generated

@@ -85305,6 +85305,41 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================================================
================================================================================
= vendor/golang.org/x/tools/go/ast/astutil licensed under: =
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
= vendor/golang.org/x/tools/LICENSE 5d4950ecb7b26d2c5e4e7b4e0dd74707
================================================================================
================================================================================
= vendor/golang.org/x/tools/go/vcs licensed under: =
@@ -85340,6 +85375,41 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================================================
================================================================================
= vendor/golang.org/x/tools/imports licensed under: =
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
= vendor/golang.org/x/tools/LICENSE 5d4950ecb7b26d2c5e4e7b4e0dd74707
================================================================================
================================================================================
= vendor/google.golang.org/api/cloudmonitoring/v2beta2 licensed under: =


@@ -206,49 +206,57 @@
"ImportPath": "golang.org/x/text/width",
"Rev": "b19bf474d317b857955b12035d2c5acb57ce8b01"
},
{
"ImportPath": "golang.org/x/tools/go/ast/astutil",
"Rev": "2382e3994d48b1d22acc2c86bcad0a2aff028e32"
},
{
"ImportPath": "golang.org/x/tools/imports",
"Rev": "2382e3994d48b1d22acc2c86bcad0a2aff028e32"
},
{
"ImportPath": "gopkg.in/yaml.v2",
"Rev": "53feefa2559fb8dfa8d81baad31be332c97d6c77"
},
{
"ImportPath": "k8s.io/gengo/args",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/examples/deepcopy-gen/generators",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/examples/defaulter-gen/generators",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/examples/import-boss/generators",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/examples/set-gen/generators",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/examples/set-gen/sets",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/generator",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/namer",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/parser",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/gengo/types",
"Rev": "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4"
"Rev": "b6c426f7730e6d66e6e476a85d1c3eb7633880e0"
},
{
"ImportPath": "k8s.io/kube-openapi/pkg/common",

vendor/BUILD vendored

@@ -380,7 +380,9 @@ filegroup(
"//vendor/golang.org/x/text/width:all-srcs",
"//vendor/golang.org/x/time/rate:all-srcs",
"//vendor/golang.org/x/tools/container/intsets:all-srcs",
"//vendor/golang.org/x/tools/go/ast/astutil:all-srcs",
"//vendor/golang.org/x/tools/go/vcs:all-srcs",
"//vendor/golang.org/x/tools/imports:all-srcs",
"//vendor/google.golang.org/api/cloudmonitoring/v2beta2:all-srcs",
"//vendor/google.golang.org/api/compute/v0.alpha:all-srcs",
"//vendor/google.golang.org/api/compute/v0.beta:all-srcs",

vendor/golang.org/x/tools/go/ast/astutil/BUILD generated vendored Normal file

@@ -0,0 +1,26 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"enclosing.go",
"imports.go",
"util.go",
],
importpath = "golang.org/x/tools/go/ast/astutil",
visibility = ["//visibility:public"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

vendor/golang.org/x/tools/go/ast/astutil/enclosing.go generated vendored Normal file

@@ -0,0 +1,627 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
// This file defines utilities for working with source positions.
import (
"fmt"
"go/ast"
"go/token"
"sort"
)
// PathEnclosingInterval returns the node that encloses the source
// interval [start, end), and all its ancestors up to the AST root.
//
// The definition of "enclosing" used by this function considers
// additional whitespace abutting a node to be enclosed by it.
// In this example:
//
// z := x + y // add them
// <-A->
// <----B----->
//
// the ast.BinaryExpr(+) node is considered to enclose interval B
// even though its [Pos()..End()) is actually only interval A.
// This behaviour makes user interfaces more tolerant of imperfect
// input.
//
// This function treats tokens as nodes, though they are not included
// in the result. e.g. PathEnclosingInterval("+") returns the
// enclosing ast.BinaryExpr("x + y").
//
// If start==end, the 1-char interval following start is used instead.
//
// The 'exact' result is true if the interval contains only path[0]
// and perhaps some adjacent whitespace. It is false if the interval
// overlaps multiple children of path[0], or if it contains only
// interior whitespace of path[0].
// In this example:
//
// z := x + y // add them
// <--C--> <---E-->
// ^
// D
//
// intervals C, D and E are inexact. C is contained by the
// z-assignment statement, because it spans three of its children (:=,
// x, +). So too is the 1-char interval D, because it contains only
// interior whitespace of the assignment. E is considered interior
// whitespace of the BlockStmt containing the assignment.
//
// Precondition: [start, end) both lie within the same file as root.
// TODO(adonovan): return (nil, false) in this case and remove precond.
// Requires FileSet; see loader.tokenFileContainsPos.
//
// Postcondition: path is never nil; it always contains at least 'root'.
//
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
var visit func(node ast.Node) bool
visit = func(node ast.Node) bool {
path = append(path, node)
nodePos := node.Pos()
nodeEnd := node.End()
// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
// Intersect [start, end) with interval of node.
if start < nodePos {
start = nodePos
}
if end > nodeEnd {
end = nodeEnd
}
// Find sole child that contains [start, end).
children := childrenOf(node)
l := len(children)
for i, child := range children {
// [childPos, childEnd) is unaugmented interval of child.
childPos := child.Pos()
childEnd := child.End()
// [augPos, augEnd) is whitespace-augmented interval of child.
augPos := childPos
augEnd := childEnd
if i > 0 {
augPos = children[i-1].End() // start of preceding whitespace
}
if i < l-1 {
nextChildPos := children[i+1].Pos()
// Does [start, end) lie between child and next child?
if start >= augEnd && end <= nextChildPos {
return false // inexact match
}
augEnd = nextChildPos // end of following whitespace
}
// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
// i, augPos, augEnd, start, end) // debugging
// Does augmented child strictly contain [start, end)?
if augPos <= start && end <= augEnd {
_, isToken := child.(tokenNode)
return isToken || visit(child)
}
// Does [start, end) overlap multiple children?
// i.e. left-augmented child contains start
// but LR-augmented child does not contain end.
if start < childEnd && end > augEnd {
break
}
}
// No single child contained [start, end),
// so node is the result. Is it exact?
// (It's tempting to put this condition before the
// child loop, but it gives the wrong result in the
// case where a node (e.g. ExprStmt) and its sole
// child have equal intervals.)
if start == nodePos && end == nodeEnd {
return true // exact match
}
return false // inexact: overlaps multiple children
}
if start > end {
start, end = end, start
}
if start < root.End() && end > root.Pos() {
if start == end {
end = start + 1 // empty interval => interval of size 1
}
exact = visit(root)
// Reverse the path:
for i, l := 0, len(path); i < l/2; i++ {
path[i], path[l-1-i] = path[l-1-i], path[i]
}
} else {
// Selection lies within whitespace preceding the
// first (or following the last) declaration in the file.
// The result nonetheless always includes the ast.File.
path = append(path, root)
}
return
}
// tokenNode is a dummy implementation of ast.Node for a single token.
// They are used transiently by PathEnclosingInterval but never escape
// this package.
//
type tokenNode struct {
pos token.Pos
end token.Pos
}
func (n tokenNode) Pos() token.Pos {
return n.pos
}
func (n tokenNode) End() token.Pos {
return n.end
}
func tok(pos token.Pos, len int) ast.Node {
return tokenNode{pos, pos + token.Pos(len)}
}
// childrenOf returns the direct non-nil children of ast.Node n.
// It may include fake ast.Node implementations for bare tokens.
// it is not safe to call (e.g.) ast.Walk on such nodes.
//
func childrenOf(n ast.Node) []ast.Node {
var children []ast.Node
// First add nodes for all true subtrees.
ast.Inspect(n, func(node ast.Node) bool {
if node == n { // push n
return true // recur
}
if node != nil { // push child
children = append(children, node)
}
return false // no recursion
})
// Then add fake Nodes for bare tokens.
switch n := n.(type) {
case *ast.ArrayType:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Elt.End(), len("]")))
case *ast.AssignStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.BasicLit:
children = append(children,
tok(n.ValuePos, len(n.Value)))
case *ast.BinaryExpr:
children = append(children, tok(n.OpPos, len(n.Op.String())))
case *ast.BlockStmt:
children = append(children,
tok(n.Lbrace, len("{")),
tok(n.Rbrace, len("}")))
case *ast.BranchStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.CallExpr:
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
if n.Ellipsis != 0 {
children = append(children, tok(n.Ellipsis, len("...")))
}
case *ast.CaseClause:
if n.List == nil {
children = append(children,
tok(n.Case, len("default")))
} else {
children = append(children,
tok(n.Case, len("case")))
}
children = append(children, tok(n.Colon, len(":")))
case *ast.ChanType:
switch n.Dir {
case ast.RECV:
children = append(children, tok(n.Begin, len("<-chan")))
case ast.SEND:
children = append(children, tok(n.Begin, len("chan<-")))
case ast.RECV | ast.SEND:
children = append(children, tok(n.Begin, len("chan")))
}
case *ast.CommClause:
if n.Comm == nil {
children = append(children,
tok(n.Case, len("default")))
} else {
children = append(children,
tok(n.Case, len("case")))
}
children = append(children, tok(n.Colon, len(":")))
case *ast.Comment:
// nop
case *ast.CommentGroup:
// nop
case *ast.CompositeLit:
children = append(children,
tok(n.Lbrace, len("{")),
tok(n.Rbrace, len("{")))
case *ast.DeclStmt:
// nop
case *ast.DeferStmt:
children = append(children,
tok(n.Defer, len("defer")))
case *ast.Ellipsis:
children = append(children,
tok(n.Ellipsis, len("...")))
case *ast.EmptyStmt:
// nop
case *ast.ExprStmt:
// nop
case *ast.Field:
// TODO(adonovan): Field.{Doc,Comment,Tag}?
case *ast.FieldList:
children = append(children,
tok(n.Opening, len("(")),
tok(n.Closing, len(")")))
case *ast.File:
// TODO test: Doc
children = append(children,
tok(n.Package, len("package")))
case *ast.ForStmt:
children = append(children,
tok(n.For, len("for")))
case *ast.FuncDecl:
// TODO(adonovan): FuncDecl.Comment?
// Uniquely, FuncDecl breaks the invariant that
// preorder traversal yields tokens in lexical order:
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
//
// As a workaround, we inline the case for FuncType
// here and order things correctly.
//
children = nil // discard ast.Walk(FuncDecl) info subtrees
children = append(children, tok(n.Type.Func, len("func")))
if n.Recv != nil {
children = append(children, n.Recv)
}
children = append(children, n.Name)
if n.Type.Params != nil {
children = append(children, n.Type.Params)
}
if n.Type.Results != nil {
children = append(children, n.Type.Results)
}
if n.Body != nil {
children = append(children, n.Body)
}
case *ast.FuncLit:
// nop
case *ast.FuncType:
if n.Func != 0 {
children = append(children,
tok(n.Func, len("func")))
}
case *ast.GenDecl:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
if n.Lparen != 0 {
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
}
case *ast.GoStmt:
children = append(children,
tok(n.Go, len("go")))
case *ast.Ident:
children = append(children,
tok(n.NamePos, len(n.Name)))
case *ast.IfStmt:
children = append(children,
tok(n.If, len("if")))
case *ast.ImportSpec:
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
case *ast.IncDecStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.IndexExpr:
children = append(children,
tok(n.Lbrack, len("{")),
tok(n.Rbrack, len("}")))
case *ast.InterfaceType:
children = append(children,
tok(n.Interface, len("interface")))
case *ast.KeyValueExpr:
children = append(children,
tok(n.Colon, len(":")))
case *ast.LabeledStmt:
children = append(children,
tok(n.Colon, len(":")))
case *ast.MapType:
children = append(children,
tok(n.Map, len("map")))
case *ast.ParenExpr:
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
case *ast.RangeStmt:
children = append(children,
tok(n.For, len("for")),
tok(n.TokPos, len(n.Tok.String())))
case *ast.ReturnStmt:
children = append(children,
tok(n.Return, len("return")))
case *ast.SelectStmt:
children = append(children,
tok(n.Select, len("select")))
case *ast.SelectorExpr:
// nop
case *ast.SendStmt:
children = append(children,
tok(n.Arrow, len("<-")))
case *ast.SliceExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.StarExpr:
children = append(children, tok(n.Star, len("*")))
case *ast.StructType:
children = append(children, tok(n.Struct, len("struct")))
case *ast.SwitchStmt:
children = append(children, tok(n.Switch, len("switch")))
case *ast.TypeAssertExpr:
children = append(children,
tok(n.Lparen-1, len(".")),
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
case *ast.TypeSpec:
// TODO(adonovan): TypeSpec.{Doc,Comment}?
case *ast.TypeSwitchStmt:
children = append(children, tok(n.Switch, len("switch")))
case *ast.UnaryExpr:
children = append(children, tok(n.OpPos, len(n.Op.String())))
case *ast.ValueSpec:
// TODO(adonovan): ValueSpec.{Doc,Comment}?
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
// nop
}
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
// the switch above so we can make interleaved callbacks for
// both Nodes and Tokens in the right order and avoid the need
// to sort.
sort.Sort(byPos(children))
return children
}
type byPos []ast.Node
func (sl byPos) Len() int {
return len(sl)
}
func (sl byPos) Less(i, j int) bool {
return sl[i].Pos() < sl[j].Pos()
}
func (sl byPos) Swap(i, j int) {
sl[i], sl[j] = sl[j], sl[i]
}
// NodeDescription returns a description of the concrete type of n suitable
// for a user interface.
//
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root. Perhaps we should do that.
//
func NodeDescription(n ast.Node) string {
switch n := n.(type) {
case *ast.ArrayType:
return "array type"
case *ast.AssignStmt:
return "assignment"
case *ast.BadDecl:
return "bad declaration"
case *ast.BadExpr:
return "bad expression"
case *ast.BadStmt:
return "bad statement"
case *ast.BasicLit:
return "basic literal"
case *ast.BinaryExpr:
return fmt.Sprintf("binary %s operation", n.Op)
case *ast.BlockStmt:
return "block"
case *ast.BranchStmt:
switch n.Tok {
case token.BREAK:
return "break statement"
case token.CONTINUE:
return "continue statement"
case token.GOTO:
return "goto statement"
case token.FALLTHROUGH:
return "fall-through statement"
}
case *ast.CallExpr:
if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
return "function call (or conversion)"
}
return "function call"
case *ast.CaseClause:
return "case clause"
case *ast.ChanType:
return "channel type"
case *ast.CommClause:
return "communication clause"
case *ast.Comment:
return "comment"
case *ast.CommentGroup:
return "comment group"
case *ast.CompositeLit:
return "composite literal"
case *ast.DeclStmt:
return NodeDescription(n.Decl) + " statement"
case *ast.DeferStmt:
return "defer statement"
case *ast.Ellipsis:
return "ellipsis"
case *ast.EmptyStmt:
return "empty statement"
case *ast.ExprStmt:
return "expression statement"
case *ast.Field:
// Can be any of these:
// struct {x, y int} -- struct field(s)
// struct {T} -- anon struct field
// interface {I} -- interface embedding
// interface {f()} -- interface method
// func (A) func(B) C -- receiver, param(s), result(s)
return "field/method/parameter"
case *ast.FieldList:
return "field/method/parameter list"
case *ast.File:
return "source file"
case *ast.ForStmt:
return "for loop"
case *ast.FuncDecl:
return "function declaration"
case *ast.FuncLit:
return "function literal"
case *ast.FuncType:
return "function type"
case *ast.GenDecl:
switch n.Tok {
case token.IMPORT:
return "import declaration"
case token.CONST:
return "constant declaration"
case token.TYPE:
return "type declaration"
case token.VAR:
return "variable declaration"
}
case *ast.GoStmt:
return "go statement"
case *ast.Ident:
return "identifier"
case *ast.IfStmt:
return "if statement"
case *ast.ImportSpec:
return "import specification"
case *ast.IncDecStmt:
if n.Tok == token.INC {
return "increment statement"
}
return "decrement statement"
case *ast.IndexExpr:
return "index expression"
case *ast.InterfaceType:
return "interface type"
case *ast.KeyValueExpr:
return "key/value association"
case *ast.LabeledStmt:
return "statement label"
case *ast.MapType:
return "map type"
case *ast.Package:
return "package"
case *ast.ParenExpr:
return "parenthesized " + NodeDescription(n.X)
case *ast.RangeStmt:
return "range loop"
case *ast.ReturnStmt:
return "return statement"
case *ast.SelectStmt:
return "select statement"
case *ast.SelectorExpr:
return "selector"
case *ast.SendStmt:
return "channel send"
case *ast.SliceExpr:
return "slice expression"
case *ast.StarExpr:
return "*-operation" // load/store expr or pointer type
case *ast.StructType:
return "struct type"
case *ast.SwitchStmt:
return "switch statement"
case *ast.TypeAssertExpr:
return "type assertion"
case *ast.TypeSpec:
return "type specification"
case *ast.TypeSwitchStmt:
return "type switch"
case *ast.UnaryExpr:
return fmt.Sprintf("unary %s operation", n.Op)
case *ast.ValueSpec:
return "value specification"
}
panic(fmt.Sprintf("unexpected node type: %T", n))
}
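
The two exported helpers above, PathEnclosingInterval and NodeDescription, are what downstream tools call. The following sketch is not part of the vendored file: it parses a made-up source snippet, converts the byte offset of the "+" operator into a token.Pos, and prints a description of every enclosing node from the innermost outwards.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

const src = `package p

func add(x, y int) int {
	z := x + y // add them
	return z
}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Convert the byte offset of the "+" operator into a token.Pos and ask
	// for the path of nodes enclosing that 1-character interval.
	plus := fset.File(f.Pos()).Pos(strings.Index(src, "+"))
	path, exact := astutil.PathEnclosingInterval(f, plus, plus+1)

	// The interval covers exactly the "+" token, so exact is true and
	// path[0] is the innermost enclosing node ("binary + operation"),
	// with the *ast.File ("source file") last.
	fmt.Println("exact:", exact)
	for _, n := range path {
		fmt.Println(astutil.NodeDescription(n))
	}
}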

vendor/golang.org/x/tools/go/ast/astutil/imports.go generated vendored Normal file

@@ -0,0 +1,450 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package astutil contains common utilities for working with the Go AST.
package astutil
import (
"fmt"
"go/ast"
"go/token"
"strconv"
"strings"
)
// AddImport adds the import path to the file f, if absent.
func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
return AddNamedImport(fset, f, "", ipath)
}
// AddNamedImport adds the import path to the file f, if absent.
// If name is not empty, it is used to rename the import.
//
// For example, calling
// AddNamedImport(fset, f, "pathpkg", "path")
// adds
// import pathpkg "path"
func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
if imports(f, ipath) {
return false
}
newImport := &ast.ImportSpec{
Path: &ast.BasicLit{
Kind: token.STRING,
Value: strconv.Quote(ipath),
},
}
if name != "" {
newImport.Name = &ast.Ident{Name: name}
}
// Find an import decl to add to.
// The goal is to find an existing import
// whose import path has the longest shared
// prefix with ipath.
var (
bestMatch = -1 // length of longest shared prefix
lastImport = -1 // index in f.Decls of the file's final import decl
impDecl *ast.GenDecl // import decl containing the best match
impIndex = -1 // spec index in impDecl containing the best match
)
for i, decl := range f.Decls {
gen, ok := decl.(*ast.GenDecl)
if ok && gen.Tok == token.IMPORT {
lastImport = i
// Do not add to import "C", to avoid disrupting the
// association with its doc comment, breaking cgo.
if declImports(gen, "C") {
continue
}
// Match an empty import decl if that's all that is available.
if len(gen.Specs) == 0 && bestMatch == -1 {
impDecl = gen
}
// Compute longest shared prefix with imports in this group.
for j, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec)
n := matchLen(importPath(impspec), ipath)
if n > bestMatch {
bestMatch = n
impDecl = gen
impIndex = j
}
}
}
}
// If no import decl found, add one after the last import.
if impDecl == nil {
impDecl = &ast.GenDecl{
Tok: token.IMPORT,
}
if lastImport >= 0 {
impDecl.TokPos = f.Decls[lastImport].End()
} else {
// There are no existing imports.
// Our new import goes after the package declaration and after
// the comment, if any, that starts on the same line as the
// package declaration.
impDecl.TokPos = f.Package
file := fset.File(f.Package)
pkgLine := file.Line(f.Package)
for _, c := range f.Comments {
if file.Line(c.Pos()) > pkgLine {
break
}
impDecl.TokPos = c.End()
}
}
f.Decls = append(f.Decls, nil)
copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
f.Decls[lastImport+1] = impDecl
}
// Insert new import at insertAt.
insertAt := 0
if impIndex >= 0 {
// insert after the found import
insertAt = impIndex + 1
}
impDecl.Specs = append(impDecl.Specs, nil)
copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
impDecl.Specs[insertAt] = newImport
pos := impDecl.Pos()
if insertAt > 0 {
// If there is a comment after an existing import, preserve the comment
// position by adding the new import after the comment.
if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
pos = spec.Comment.End()
} else {
// Assign same position as the previous import,
// so that the sorter sees it as being in the same block.
pos = impDecl.Specs[insertAt-1].Pos()
}
}
if newImport.Name != nil {
newImport.Name.NamePos = pos
}
newImport.Path.ValuePos = pos
newImport.EndPos = pos
// Clean up parens. impDecl contains at least one spec.
if len(impDecl.Specs) == 1 {
// Remove unneeded parens.
impDecl.Lparen = token.NoPos
} else if !impDecl.Lparen.IsValid() {
// impDecl needs parens added.
impDecl.Lparen = impDecl.Specs[0].Pos()
}
f.Imports = append(f.Imports, newImport)
if len(f.Decls) <= 1 {
return true
}
// Merge all the import declarations into the first one.
var first *ast.GenDecl
for i := 0; i < len(f.Decls); i++ {
decl := f.Decls[i]
gen, ok := decl.(*ast.GenDecl)
if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
continue
}
if first == nil {
first = gen
continue // Don't touch the first one.
}
// We now know there is more than one package in this import
// declaration. Ensure that it ends up parenthesized.
first.Lparen = first.Pos()
// Move the imports of the other import declaration to the first one.
for _, spec := range gen.Specs {
spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
first.Specs = append(first.Specs, spec)
}
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
i--
}
return true
}
// DeleteImport deletes the import path from the file f, if present.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
return DeleteNamedImport(fset, f, "", path)
}
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
var delspecs []*ast.ImportSpec
var delcomments []*ast.CommentGroup
// Find the import nodes that import path, if any.
for i := 0; i < len(f.Decls); i++ {
decl := f.Decls[i]
gen, ok := decl.(*ast.GenDecl)
if !ok || gen.Tok != token.IMPORT {
continue
}
for j := 0; j < len(gen.Specs); j++ {
spec := gen.Specs[j]
impspec := spec.(*ast.ImportSpec)
if impspec.Name == nil && name != "" {
continue
}
if impspec.Name != nil && impspec.Name.Name != name {
continue
}
if importPath(impspec) != path {
continue
}
// We found an import spec that imports path.
// Delete it.
delspecs = append(delspecs, impspec)
deleted = true
copy(gen.Specs[j:], gen.Specs[j+1:])
gen.Specs = gen.Specs[:len(gen.Specs)-1]
// If this was the last import spec in this decl,
// delete the decl, too.
if len(gen.Specs) == 0 {
copy(f.Decls[i:], f.Decls[i+1:])
f.Decls = f.Decls[:len(f.Decls)-1]
i--
break
} else if len(gen.Specs) == 1 {
if impspec.Doc != nil {
delcomments = append(delcomments, impspec.Doc)
}
if impspec.Comment != nil {
delcomments = append(delcomments, impspec.Comment)
}
for _, cg := range f.Comments {
// Found comment on the same line as the import spec.
if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
delcomments = append(delcomments, cg)
break
}
}
spec := gen.Specs[0].(*ast.ImportSpec)
// Move the documentation right after the import decl.
if spec.Doc != nil {
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
}
}
for _, cg := range f.Comments {
if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
}
break
}
}
}
if j > 0 {
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
line := fset.Position(impspec.Path.ValuePos).Line
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
if line-lastLine > 1 {
// There was a blank line immediately preceding the deleted import,
// so there's no need to close the hole.
// Do nothing.
} else {
// There was no blank line. Close the hole.
fset.File(gen.Rparen).MergeLine(line)
}
}
j--
}
}
// Delete imports from f.Imports.
for i := 0; i < len(f.Imports); i++ {
imp := f.Imports[i]
for j, del := range delspecs {
if imp == del {
copy(f.Imports[i:], f.Imports[i+1:])
f.Imports = f.Imports[:len(f.Imports)-1]
copy(delspecs[j:], delspecs[j+1:])
delspecs = delspecs[:len(delspecs)-1]
i--
break
}
}
}
// Delete comments from f.Comments.
for i := 0; i < len(f.Comments); i++ {
cg := f.Comments[i]
for j, del := range delcomments {
if cg == del {
copy(f.Comments[i:], f.Comments[i+1:])
f.Comments = f.Comments[:len(f.Comments)-1]
copy(delcomments[j:], delcomments[j+1:])
delcomments = delcomments[:len(delcomments)-1]
i--
break
}
}
}
if len(delspecs) > 0 {
panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
}
return
}
// RewriteImport rewrites any import of path oldPath to path newPath.
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
for _, imp := range f.Imports {
if importPath(imp) == oldPath {
rewrote = true
// record old End, because the default is to compute
// it using the length of imp.Path.Value.
imp.EndPos = imp.End()
imp.Path.Value = strconv.Quote(newPath)
}
}
return
}
// UsesImport reports whether a given import is used.
func UsesImport(f *ast.File, path string) (used bool) {
spec := importSpec(f, path)
if spec == nil {
return
}
name := spec.Name.String()
switch name {
case "<nil>":
// If the package name is not explicitly specified,
// make an educated guess. This is not guaranteed to be correct.
lastSlash := strings.LastIndex(path, "/")
if lastSlash == -1 {
name = path
} else {
name = path[lastSlash+1:]
}
case "_", ".":
// Not sure if this import is used - err on the side of caution.
return true
}
ast.Walk(visitFn(func(n ast.Node) {
sel, ok := n.(*ast.SelectorExpr)
if ok && isTopName(sel.X, name) {
used = true
}
}), f)
return
}
type visitFn func(node ast.Node)
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
fn(node)
return fn
}
// imports returns true if f imports path.
func imports(f *ast.File, path string) bool {
return importSpec(f, path) != nil
}
// importSpec returns the import spec if f imports path,
// or nil otherwise.
func importSpec(f *ast.File, path string) *ast.ImportSpec {
for _, s := range f.Imports {
if importPath(s) == path {
return s
}
}
return nil
}
// importPath returns the unquoted import path of s,
// or "" if the path is not properly quoted.
func importPath(s *ast.ImportSpec) string {
t, err := strconv.Unquote(s.Path.Value)
if err == nil {
return t
}
return ""
}
// declImports reports whether gen contains an import of path.
func declImports(gen *ast.GenDecl, path string) bool {
if gen.Tok != token.IMPORT {
return false
}
for _, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec)
if importPath(impspec) == path {
return true
}
}
return false
}
// matchLen returns the length of the longest path segment prefix shared by x and y.
func matchLen(x, y string) int {
n := 0
for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
if x[i] == '/' {
n++
}
}
return n
}
// isTopName returns true if n is a top-level unresolved identifier with the given name.
func isTopName(n ast.Expr, name string) bool {
id, ok := n.(*ast.Ident)
return ok && id.Name == name && id.Obj == nil
}
// Imports returns the file imports grouped by paragraph.
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
var groups [][]*ast.ImportSpec
for _, decl := range f.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok || genDecl.Tok != token.IMPORT {
break
}
group := []*ast.ImportSpec{}
var lastLine int
for _, spec := range genDecl.Specs {
importSpec := spec.(*ast.ImportSpec)
pos := importSpec.Path.ValuePos
line := fset.Position(pos).Line
if lastLine > 0 && pos > 0 && line-lastLine > 1 {
groups = append(groups, group)
group = []*ast.ImportSpec{}
}
group = append(group, importSpec)
lastLine = line
}
groups = append(groups, group)
}
return groups
}
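
The file above adds and removes import specs on a parsed *ast.File in place. A small usage sketch (separate from the vendored code; the sample source and the pathpkg/"path" import are arbitrary choices) that adds a named import, checks which imports are actually referenced, and prints the rewritten file:

package main

import (
	"bytes"
	"fmt"
	"go/parser"
	"go/printer"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
)

const src = `package p

import "fmt"

func hello() { fmt.Println("hi") }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Add `import pathpkg "path"` if it is not already present.
	added := astutil.AddNamedImport(fset, f, "pathpkg", "path")
	fmt.Println("added:", added)

	// "fmt" is referenced by hello(), so UsesImport reports true;
	// the freshly added "path" import is not used yet.
	fmt.Println(astutil.UsesImport(f, "fmt"))  // true
	fmt.Println(astutil.UsesImport(f, "path")) // false

	// Imports returns the specs grouped by blank-line-separated paragraphs.
	for _, group := range astutil.Imports(fset, f) {
		for _, spec := range group {
			fmt.Println("import:", spec.Path.Value)
		}
	}

	// Print the modified file back to source.
	var buf bytes.Buffer
	if err := printer.Fprint(&buf, fset, f); err != nil {
		panic(err)
	}
	fmt.Println(buf.String())
}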

vendor/golang.org/x/tools/go/ast/astutil/util.go generated vendored Normal file

@@ -0,0 +1,14 @@
package astutil
import "go/ast"
// Unparen returns e with any enclosing parentheses stripped.
func Unparen(e ast.Expr) ast.Expr {
for {
p, ok := e.(*ast.ParenExpr)
if !ok {
return e
}
e = p.X
}
}
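
Unparen simply strips any number of enclosing parentheses from an expression. A tiny illustration (hypothetical snippet, not part of the commit):

package main

import (
	"fmt"
	"go/parser"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	// Parse a doubly parenthesized expression and peel the parens off.
	expr, err := parser.ParseExpr("((x + 1))")
	if err != nil {
		panic(err)
	}
	inner := astutil.Unparen(expr)
	fmt.Printf("%T\n", expr)  // *ast.ParenExpr
	fmt.Printf("%T\n", inner) // *ast.BinaryExpr
}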

vendor/golang.org/x/tools/imports/BUILD generated vendored Normal file

@@ -0,0 +1,71 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = [
"fastwalk.go",
"fix.go",
"imports.go",
"sortimports.go",
"zstdlib.go",
] + select({
"@io_bazel_rules_go//go/platform:android": [
"fastwalk_portable.go",
],
"@io_bazel_rules_go//go/platform:darwin": [
"fastwalk_dirent_ino.go",
"fastwalk_unix.go",
],
"@io_bazel_rules_go//go/platform:dragonfly": [
"fastwalk_portable.go",
],
"@io_bazel_rules_go//go/platform:freebsd": [
"fastwalk_dirent_fileno.go",
"fastwalk_unix.go",
],
"@io_bazel_rules_go//go/platform:linux": [
"fastwalk_dirent_ino.go",
"fastwalk_unix.go",
],
"@io_bazel_rules_go//go/platform:nacl": [
"fastwalk_portable.go",
],
"@io_bazel_rules_go//go/platform:netbsd": [
"fastwalk_dirent_fileno.go",
"fastwalk_unix.go",
],
"@io_bazel_rules_go//go/platform:openbsd": [
"fastwalk_dirent_fileno.go",
"fastwalk_unix.go",
],
"@io_bazel_rules_go//go/platform:plan9": [
"fastwalk_portable.go",
],
"@io_bazel_rules_go//go/platform:solaris": [
"fastwalk_portable.go",
],
"@io_bazel_rules_go//go/platform:windows": [
"fastwalk_portable.go",
],
"//conditions:default": [],
}),
importpath = "golang.org/x/tools/imports",
visibility = ["//visibility:public"],
deps = [
"//vendor/golang.org/x/tools/go/ast/astutil:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

vendor/golang.org/x/tools/imports/fastwalk.go generated vendored Normal file

@@ -0,0 +1,187 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// A faster implementation of filepath.Walk.
//
// filepath.Walk's design necessarily calls os.Lstat on each file,
// even if the caller needs less info. And goimports only need to know
// the type of each file. The kernel interface provides the type in
// the Readdir call but the standard library ignored it.
// fastwalk_unix.go contains a fork of the syscall routines.
//
// See golang.org/issue/16399
package imports
import (
"errors"
"os"
"path/filepath"
"runtime"
"sync"
)
// traverseLink is a sentinel error for fastWalk, similar to filepath.SkipDir.
var traverseLink = errors.New("traverse symlink, assuming target is a directory")
// fastWalk walks the file tree rooted at root, calling walkFn for
// each file or directory in the tree, including root.
//
// If fastWalk returns filepath.SkipDir, the directory is skipped.
//
// Unlike filepath.Walk:
// * file stat calls must be done by the user.
// The only provided metadata is the file type, which does not include
// any permission bits.
// * multiple goroutines stat the filesystem concurrently. The provided
// walkFn must be safe for concurrent use.
// * fastWalk can follow symlinks if walkFn returns the traverseLink
// sentinel error. It is the walkFn's responsibility to prevent
// fastWalk from going into symlink cycles.
func fastWalk(root string, walkFn func(path string, typ os.FileMode) error) error {
// TODO(bradfitz): make numWorkers configurable? We used a
// minimum of 4 to give the kernel more info about multiple
// things we want, in hopes its I/O scheduling can take
// advantage of that. Hopefully most are in cache. Maybe 4 is
// even too low of a minimum. Profile more.
numWorkers := 4
if n := runtime.NumCPU(); n > numWorkers {
numWorkers = n
}
// Make sure to wait for all workers to finish, otherwise
// walkFn could still be called after returning. This Wait call
// runs after close(e.donec) below.
var wg sync.WaitGroup
defer wg.Wait()
w := &walker{
fn: walkFn,
enqueuec: make(chan walkItem, numWorkers), // buffered for performance
workc: make(chan walkItem, numWorkers), // buffered for performance
donec: make(chan struct{}),
// buffered for correctness & not leaking goroutines:
resc: make(chan error, numWorkers),
}
defer close(w.donec)
for i := 0; i < numWorkers; i++ {
wg.Add(1)
go w.doWork(&wg)
}
todo := []walkItem{{dir: root}}
out := 0
for {
workc := w.workc
var workItem walkItem
if len(todo) == 0 {
workc = nil
} else {
workItem = todo[len(todo)-1]
}
select {
case workc <- workItem:
todo = todo[:len(todo)-1]
out++
case it := <-w.enqueuec:
todo = append(todo, it)
case err := <-w.resc:
out--
if err != nil {
return err
}
if out == 0 && len(todo) == 0 {
// It's safe to quit here, as long as the buffered
// enqueue channel isn't also readable, which might
// happen if the worker sends both another unit of
// work and its result before the other select was
// scheduled and both w.resc and w.enqueuec were
// readable.
select {
case it := <-w.enqueuec:
todo = append(todo, it)
default:
return nil
}
}
}
}
}
// doWork reads directories as instructed (via workc) and runs the
// user's callback function.
func (w *walker) doWork(wg *sync.WaitGroup) {
defer wg.Done()
for {
select {
case <-w.donec:
return
case it := <-w.workc:
select {
case <-w.donec:
return
case w.resc <- w.walk(it.dir, !it.callbackDone):
}
}
}
}
type walker struct {
fn func(path string, typ os.FileMode) error
donec chan struct{} // closed on fastWalk's return
workc chan walkItem // to workers
enqueuec chan walkItem // from workers
resc chan error // from workers
}
type walkItem struct {
dir string
callbackDone bool // callback already called; don't do it again
}
func (w *walker) enqueue(it walkItem) {
select {
case w.enqueuec <- it:
case <-w.donec:
}
}
func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
joined := dirName + string(os.PathSeparator) + baseName
if typ == os.ModeDir {
w.enqueue(walkItem{dir: joined})
return nil
}
err := w.fn(joined, typ)
if typ == os.ModeSymlink {
if err == traverseLink {
// Set callbackDone so we don't call it twice for both the
// symlink-as-symlink and the symlink-as-directory later:
w.enqueue(walkItem{dir: joined, callbackDone: true})
return nil
}
if err == filepath.SkipDir {
// Permit SkipDir on symlinks too.
return nil
}
}
return err
}
func (w *walker) walk(root string, runUserCallback bool) error {
if runUserCallback {
err := w.fn(root, os.ModeDir)
if err == filepath.SkipDir {
return nil
}
if err != nil {
return err
}
}
return readDir(root, w.onDirEnt)
}
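A rough usage sketch, not part of the vendored file: the helper below would have to live in package imports itself, since fastWalk is unexported, and it assumes the standard library packages os, path/filepath, strings, and sync are imported. Because fastWalk may run the callback from multiple goroutines, the counter is protected by a mutex.

// countGoFiles is an illustrative sketch: count .go files under root,
// skipping "testdata" directories. walkFn may be called concurrently,
// so shared state must be synchronized.
func countGoFiles(root string) (int, error) {
	var (
		mu sync.Mutex
		n  int
	)
	err := fastWalk(root, func(path string, typ os.FileMode) error {
		if typ == os.ModeDir && filepath.Base(path) == "testdata" {
			return filepath.SkipDir
		}
		if typ.IsRegular() && strings.HasSuffix(path, ".go") {
			mu.Lock()
			n++
			mu.Unlock()
		}
		return nil
	})
	return n, err
}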


@ -0,0 +1,13 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build freebsd openbsd netbsd
package imports
import "syscall"
func direntInode(dirent *syscall.Dirent) uint64 {
return uint64(dirent.Fileno)
}


@ -0,0 +1,13 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux,!appengine darwin
package imports
import "syscall"
func direntInode(dirent *syscall.Dirent) uint64 {
return uint64(dirent.Ino)
}

29
vendor/golang.org/x/tools/imports/fastwalk_portable.go generated vendored Normal file

@ -0,0 +1,29 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd
package imports
import (
"io/ioutil"
"os"
)
// readDir calls fn for each directory entry in dirName.
// It does not descend into directories or follow symlinks.
// If fn returns a non-nil error, readDir returns with that error
// immediately.
func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
fis, err := ioutil.ReadDir(dirName)
if err != nil {
return err
}
for _, fi := range fis {
if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
return err
}
}
return nil
}

122
vendor/golang.org/x/tools/imports/fastwalk_unix.go generated vendored Normal file

@ -0,0 +1,122 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux,!appengine darwin freebsd openbsd netbsd
package imports
import (
"bytes"
"fmt"
"os"
"syscall"
"unsafe"
)
const blockSize = 8 << 10
// unknownFileMode is a sentinel (and bogus) os.FileMode
// value used to represent a syscall.DT_UNKNOWN Dirent.Type.
const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice
func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
fd, err := syscall.Open(dirName, 0, 0)
if err != nil {
return err
}
defer syscall.Close(fd)
// The buffer must be at least a block long.
buf := make([]byte, blockSize) // stack-allocated; doesn't escape
bufp := 0 // starting read position in buf
nbuf := 0 // end valid data in buf
for {
if bufp >= nbuf {
bufp = 0
nbuf, err = syscall.ReadDirent(fd, buf)
if err != nil {
return os.NewSyscallError("readdirent", err)
}
if nbuf <= 0 {
return nil
}
}
consumed, name, typ := parseDirEnt(buf[bufp:nbuf])
bufp += consumed
if name == "" || name == "." || name == ".." {
continue
}
// Fallback for filesystems (like old XFS) that don't
// support Dirent.Type and have DT_UNKNOWN (0) there
// instead.
if typ == unknownFileMode {
fi, err := os.Lstat(dirName + "/" + name)
if err != nil {
// It got deleted in the meantime.
if os.IsNotExist(err) {
continue
}
return err
}
typ = fi.Mode() & os.ModeType
}
if err := fn(dirName, name, typ); err != nil {
return err
}
}
}
func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
// golang.org/issue/15653
dirent := (*syscall.Dirent)(unsafe.Pointer(&buf[0]))
if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v {
panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v))
}
if len(buf) < int(dirent.Reclen) {
panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen))
}
consumed = int(dirent.Reclen)
if direntInode(dirent) == 0 { // File absent in directory.
return
}
switch dirent.Type {
case syscall.DT_REG:
typ = 0
case syscall.DT_DIR:
typ = os.ModeDir
case syscall.DT_LNK:
typ = os.ModeSymlink
case syscall.DT_BLK:
typ = os.ModeDevice
case syscall.DT_FIFO:
typ = os.ModeNamedPipe
case syscall.DT_SOCK:
typ = os.ModeSocket
case syscall.DT_UNKNOWN:
typ = unknownFileMode
default:
// Skip weird things.
// It's probably a DT_WHT (http://lwn.net/Articles/325369/)
// or something. Revisit if/when this package is moved outside
// of goimports. goimports only cares about regular files,
// symlinks, and directories.
return
}
nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
nameLen := bytes.IndexByte(nameBuf[:], 0)
if nameLen < 0 {
panic("failed to find terminating 0 byte in dirent")
}
// Special cases for common things:
if nameLen == 1 && nameBuf[0] == '.' {
name = "."
} else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' {
name = ".."
} else {
name = string(nameBuf[:nameLen])
}
return
}

974
vendor/golang.org/x/tools/imports/fix.go generated vendored Normal file

@ -0,0 +1,974 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package imports
import (
"bufio"
"bytes"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"sort"
"strings"
"sync"
"golang.org/x/tools/go/ast/astutil"
)
// Debug controls verbose logging.
var Debug = false
var (
inTests = false // set true by fix_test.go; if false, no need to use testMu
testMu sync.RWMutex // guards globals reset by tests; used only if inTests
)
// If set, LocalPrefix instructs Process to sort import paths with the given
// prefix into another group after 3rd-party packages.
var LocalPrefix string
// importToGroup is a list of functions which map from an import path to
// a group number.
var importToGroup = []func(importPath string) (num int, ok bool){
func(importPath string) (num int, ok bool) {
if LocalPrefix != "" && strings.HasPrefix(importPath, LocalPrefix) {
return 3, true
}
return
},
func(importPath string) (num int, ok bool) {
if strings.HasPrefix(importPath, "appengine") {
return 2, true
}
return
},
func(importPath string) (num int, ok bool) {
if strings.Contains(importPath, ".") {
return 1, true
}
return
},
}
func importGroup(importPath string) int {
for _, fn := range importToGroup {
if n, ok := fn(importPath); ok {
return n
}
}
return 0
}
// packageInfo is a summary of features found in a package.
type packageInfo struct {
Globals map[string]bool // symbol => true
}
// dirPackageInfo exposes the dirPackageInfoFile function so that it can be overridden.
var dirPackageInfo = dirPackageInfoFile
// dirPackageInfoFile gets information from other files in the package.
func dirPackageInfoFile(pkgName, srcDir, filename string) (*packageInfo, error) {
considerTests := strings.HasSuffix(filename, "_test.go")
// Handle file from stdin
if _, err := os.Stat(filename); err != nil {
if os.IsNotExist(err) {
return &packageInfo{}, nil
}
return nil, err
}
fileBase := filepath.Base(filename)
packageFileInfos, err := ioutil.ReadDir(srcDir)
if err != nil {
return nil, err
}
info := &packageInfo{Globals: make(map[string]bool)}
for _, fi := range packageFileInfos {
if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
continue
}
if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
continue
}
fileSet := token.NewFileSet()
root, err := parser.ParseFile(fileSet, filepath.Join(srcDir, fi.Name()), nil, 0)
if err != nil {
continue
}
for _, decl := range root.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok {
continue
}
for _, spec := range genDecl.Specs {
valueSpec, ok := spec.(*ast.ValueSpec)
if !ok {
continue
}
info.Globals[valueSpec.Names[0].Name] = true
}
}
}
return info, nil
}
func fixImports(fset *token.FileSet, f *ast.File, filename string) (added []string, err error) {
// refs are a set of possible package references currently unsatisfied by imports.
// first key: either base package (e.g. "fmt") or renamed package
// second key: referenced package symbol (e.g. "Println")
refs := make(map[string]map[string]bool)
// decls are the current package imports. key is base package or renamed package.
decls := make(map[string]*ast.ImportSpec)
abs, err := filepath.Abs(filename)
if err != nil {
return nil, err
}
srcDir := filepath.Dir(abs)
if Debug {
log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
}
var packageInfo *packageInfo
var loadedPackageInfo bool
// collect potential uses of packages.
var visitor visitFn
visitor = visitFn(func(node ast.Node) ast.Visitor {
if node == nil {
return visitor
}
switch v := node.(type) {
case *ast.ImportSpec:
if v.Name != nil {
decls[v.Name.Name] = v
break
}
ipath := strings.Trim(v.Path.Value, `"`)
if ipath == "C" {
break
}
local := importPathToName(ipath, srcDir)
decls[local] = v
case *ast.SelectorExpr:
xident, ok := v.X.(*ast.Ident)
if !ok {
break
}
if xident.Obj != nil {
// if the parser can resolve it, it's not a package ref
break
}
pkgName := xident.Name
if refs[pkgName] == nil {
refs[pkgName] = make(map[string]bool)
}
if !loadedPackageInfo {
loadedPackageInfo = true
packageInfo, _ = dirPackageInfo(f.Name.Name, srcDir, filename)
}
if decls[pkgName] == nil && (packageInfo == nil || !packageInfo.Globals[pkgName]) {
refs[pkgName][v.Sel.Name] = true
}
}
return visitor
})
ast.Walk(visitor, f)
// Nil out any unused ImportSpecs, to be removed in following passes
unusedImport := map[string]string{}
for pkg, is := range decls {
if refs[pkg] == nil && pkg != "_" && pkg != "." {
name := ""
if is.Name != nil {
name = is.Name.Name
}
unusedImport[strings.Trim(is.Path.Value, `"`)] = name
}
}
for ipath, name := range unusedImport {
if ipath == "C" {
// Don't remove cgo stuff.
continue
}
astutil.DeleteNamedImport(fset, f, name, ipath)
}
for pkgName, symbols := range refs {
if len(symbols) == 0 {
// skip over packages already imported
delete(refs, pkgName)
}
}
// Search for imports matching potential package references.
searches := 0
type result struct {
ipath string // import path (if err == nil)
name string // optional name to rename import as
err error
}
results := make(chan result)
for pkgName, symbols := range refs {
go func(pkgName string, symbols map[string]bool) {
ipath, rename, err := findImport(pkgName, symbols, filename)
r := result{ipath: ipath, err: err}
if rename {
r.name = pkgName
}
results <- r
}(pkgName, symbols)
searches++
}
for i := 0; i < searches; i++ {
result := <-results
if result.err != nil {
return nil, result.err
}
if result.ipath != "" {
if result.name != "" {
astutil.AddNamedImport(fset, f, result.name, result.ipath)
} else {
astutil.AddImport(fset, f, result.ipath)
}
added = append(added, result.ipath)
}
}
return added, nil
}
// importPathToName returns the package name for the given import path.
var importPathToName func(importPath, srcDir string) (packageName string) = importPathToNameGoPath
// importPathToNameBasic assumes the package name is the base of import path.
func importPathToNameBasic(importPath, srcDir string) (packageName string) {
return path.Base(importPath)
}
// importPathToNameGoPath finds out the actual package name, as declared in its .go files.
// If there's a problem, it falls back to using importPathToNameBasic.
func importPathToNameGoPath(importPath, srcDir string) (packageName string) {
// Fast path for standard library without going to disk.
if pkg, ok := stdImportPackage[importPath]; ok {
return pkg
}
pkgName, err := importPathToNameGoPathParse(importPath, srcDir)
if Debug {
log.Printf("importPathToNameGoPathParse(%q, srcDir=%q) = %q, %v", importPath, srcDir, pkgName, err)
}
if err == nil {
return pkgName
}
return importPathToNameBasic(importPath, srcDir)
}
// importPathToNameGoPathParse is a faster version of build.Import if
// the only thing desired is the package name. It uses build.FindOnly
// to find the directory and then only parses one file in the package,
// trusting that the files in the directory are consistent.
func importPathToNameGoPathParse(importPath, srcDir string) (packageName string, err error) {
buildPkg, err := build.Import(importPath, srcDir, build.FindOnly)
if err != nil {
return "", err
}
d, err := os.Open(buildPkg.Dir)
if err != nil {
return "", err
}
names, err := d.Readdirnames(-1)
d.Close()
if err != nil {
return "", err
}
sort.Strings(names) // to have predictable behavior
var lastErr error
var nfile int
for _, name := range names {
if !strings.HasSuffix(name, ".go") {
continue
}
if strings.HasSuffix(name, "_test.go") {
continue
}
nfile++
fullFile := filepath.Join(buildPkg.Dir, name)
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
if err != nil {
lastErr = err
continue
}
pkgName := f.Name.Name
if pkgName == "documentation" {
// Special case from go/build.ImportDir, not
// handled by ctx.MatchFile.
continue
}
if pkgName == "main" {
// Also skip package main, assuming it's a +build ignore generator or example.
// Since you can't import a package main anyway, there's no harm here.
continue
}
return pkgName, nil
}
if lastErr != nil {
return "", lastErr
}
return "", fmt.Errorf("no importable package found in %d Go files", nfile)
}
var stdImportPackage = map[string]string{} // "net/http" => "http"
func init() {
// Nothing in the standard library has a package name not
// matching its import base name.
for _, pkg := range stdlib {
if _, ok := stdImportPackage[pkg]; !ok {
stdImportPackage[pkg] = path.Base(pkg)
}
}
}
// Directory-scanning state.
var (
// scanGoRootOnce guards calling scanGoRoot (for $GOROOT)
scanGoRootOnce sync.Once
// scanGoPathOnce guards calling scanGoPath (for $GOPATH)
scanGoPathOnce sync.Once
// populateIgnoreOnce guards calling populateIgnore
populateIgnoreOnce sync.Once
ignoredDirs []os.FileInfo
dirScanMu sync.RWMutex
dirScan map[string]*pkg // abs dir path => *pkg
)
type pkg struct {
dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
importPath string // full pkg import path ("net/http", "foo/bar/vendor/a/b")
importPathShort string // vendorless import path ("net/http", "a/b")
}
// byImportPathShortLength sorts by the short import path length, breaking ties on the
// import string itself.
type byImportPathShortLength []*pkg
func (s byImportPathShortLength) Len() int { return len(s) }
func (s byImportPathShortLength) Less(i, j int) bool {
vi, vj := s[i].importPathShort, s[j].importPathShort
return len(vi) < len(vj) || (len(vi) == len(vj) && vi < vj)
}
func (s byImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
// guarded by populateIgnoreOnce; populates ignoredDirs.
func populateIgnore() {
for _, srcDir := range build.Default.SrcDirs() {
if srcDir == filepath.Join(build.Default.GOROOT, "src") {
continue
}
populateIgnoredDirs(srcDir)
}
}
// populateIgnoredDirs reads an optional config file at <path>/.goimportsignore
// listing relative directories to ignore when scanning for Go files.
// The provided path is one of the $GOPATH entries with "src" appended.
func populateIgnoredDirs(path string) {
file := filepath.Join(path, ".goimportsignore")
slurp, err := ioutil.ReadFile(file)
if Debug {
if err != nil {
log.Print(err)
} else {
log.Printf("Read %s", file)
}
}
if err != nil {
return
}
bs := bufio.NewScanner(bytes.NewReader(slurp))
for bs.Scan() {
line := strings.TrimSpace(bs.Text())
if line == "" || strings.HasPrefix(line, "#") {
continue
}
full := filepath.Join(path, line)
if fi, err := os.Stat(full); err == nil {
ignoredDirs = append(ignoredDirs, fi)
if Debug {
log.Printf("Directory added to ignore list: %s", full)
}
} else if Debug {
log.Printf("Error statting entry in .goimportsignore: %v", err)
}
}
}
func skipDir(fi os.FileInfo) bool {
for _, ignoredDir := range ignoredDirs {
if os.SameFile(fi, ignoredDir) {
return true
}
}
return false
}
// shouldTraverse reports whether the symlink fi, found in dir, should be
// followed. It makes sure symlinks were never visited
// before to avoid symlink loops.
func shouldTraverse(dir string, fi os.FileInfo) bool {
path := filepath.Join(dir, fi.Name())
target, err := filepath.EvalSymlinks(path)
if err != nil {
if !os.IsNotExist(err) {
fmt.Fprintln(os.Stderr, err)
}
return false
}
ts, err := os.Stat(target)
if err != nil {
fmt.Fprintln(os.Stderr, err)
return false
}
if !ts.IsDir() {
return false
}
if skipDir(ts) {
return false
}
// Check for symlink loops by statting each directory component
// and seeing if any are the same file as ts.
for {
parent := filepath.Dir(path)
if parent == path {
// Made it to the root without seeing a cycle.
// Use this symlink.
return true
}
parentInfo, err := os.Stat(parent)
if err != nil {
return false
}
if os.SameFile(ts, parentInfo) {
// Cycle. Don't traverse.
return false
}
path = parent
}
}
var testHookScanDir = func(dir string) {}
var scanGoRootDone = make(chan struct{}) // closed when scanGoRoot is done
func scanGoRoot() {
go func() {
scanGoDirs(true)
close(scanGoRootDone)
}()
}
func scanGoPath() { scanGoDirs(false) }
func scanGoDirs(goRoot bool) {
if Debug {
which := "$GOROOT"
if !goRoot {
which = "$GOPATH"
}
log.Printf("scanning " + which)
defer log.Printf("scanned " + which)
}
dirScanMu.Lock()
if dirScan == nil {
dirScan = make(map[string]*pkg)
}
dirScanMu.Unlock()
for _, srcDir := range build.Default.SrcDirs() {
isGoroot := srcDir == filepath.Join(build.Default.GOROOT, "src")
if isGoroot != goRoot {
continue
}
testHookScanDir(srcDir)
walkFn := func(path string, typ os.FileMode) error {
dir := filepath.Dir(path)
if typ.IsRegular() {
if dir == srcDir {
// Doesn't make sense to have regular files
// directly in your $GOPATH/src or $GOROOT/src.
return nil
}
if !strings.HasSuffix(path, ".go") {
return nil
}
dirScanMu.Lock()
if _, dup := dirScan[dir]; !dup {
importpath := filepath.ToSlash(dir[len(srcDir)+len("/"):])
dirScan[dir] = &pkg{
importPath: importpath,
importPathShort: vendorlessImportPath(importpath),
dir: dir,
}
}
dirScanMu.Unlock()
return nil
}
if typ == os.ModeDir {
base := filepath.Base(path)
if base == "" || base[0] == '.' || base[0] == '_' ||
base == "testdata" || base == "node_modules" {
return filepath.SkipDir
}
fi, err := os.Lstat(path)
if err == nil && skipDir(fi) {
if Debug {
log.Printf("skipping directory %q under %s", fi.Name(), dir)
}
return filepath.SkipDir
}
return nil
}
if typ == os.ModeSymlink {
base := filepath.Base(path)
if strings.HasPrefix(base, ".#") {
// Emacs noise.
return nil
}
fi, err := os.Lstat(path)
if err != nil {
// Just ignore it.
return nil
}
if shouldTraverse(dir, fi) {
return traverseLink
}
}
return nil
}
if err := fastWalk(srcDir, walkFn); err != nil {
log.Printf("goimports: scanning directory %v: %v", srcDir, err)
}
}
}
// vendorlessImportPath returns the devendorized version of the provided import path.
// e.g. "foo/bar/vendor/a/b" => "a/b"
func vendorlessImportPath(ipath string) string {
// Devendorize for use in import statement.
if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
return ipath[i+len("/vendor/"):]
}
if strings.HasPrefix(ipath, "vendor/") {
return ipath[len("vendor/"):]
}
return ipath
}
// loadExports returns the set of exported symbols in the package at dir.
// It returns nil on error or if the package name in dir does not match expectPackage.
var loadExports func(expectPackage, dir string) map[string]bool = loadExportsGoPath
func loadExportsGoPath(expectPackage, dir string) map[string]bool {
if Debug {
log.Printf("loading exports in dir %s (seeking package %s)", dir, expectPackage)
}
exports := make(map[string]bool)
ctx := build.Default
// ReadDir is like ioutil.ReadDir, but only returns *.go files
// and filters out _test.go files since they're not relevant
// and only slow things down.
ctx.ReadDir = func(dir string) (notTests []os.FileInfo, err error) {
all, err := ioutil.ReadDir(dir)
if err != nil {
return nil, err
}
notTests = all[:0]
for _, fi := range all {
name := fi.Name()
if strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") {
notTests = append(notTests, fi)
}
}
return notTests, nil
}
files, err := ctx.ReadDir(dir)
if err != nil {
log.Print(err)
return nil
}
fset := token.NewFileSet()
for _, fi := range files {
match, err := ctx.MatchFile(dir, fi.Name())
if err != nil || !match {
continue
}
fullFile := filepath.Join(dir, fi.Name())
f, err := parser.ParseFile(fset, fullFile, nil, 0)
if err != nil {
if Debug {
log.Printf("Parsing %s: %v", fullFile, err)
}
return nil
}
pkgName := f.Name.Name
if pkgName == "documentation" {
// Special case from go/build.ImportDir, not
// handled by ctx.MatchFile.
continue
}
if pkgName != expectPackage {
if Debug {
log.Printf("scan of dir %v is not expected package %v (actually %v)", dir, expectPackage, pkgName)
}
return nil
}
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports[name] = true
}
}
}
if Debug {
exportList := make([]string, 0, len(exports))
for k := range exports {
exportList = append(exportList, k)
}
sort.Strings(exportList)
log.Printf("loaded exports in dir %v (package %v): %v", dir, expectPackage, strings.Join(exportList, ", "))
}
return exports
}
// findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil).
//
// This is declared as a variable rather than a function so goimports
// can be easily extended by adding a file with an init function.
//
// The rename value tells goimports whether to use the package name as
// a local qualifier in an import. For example, if findImports("pkg",
// "X") returns ("foo/bar", rename=true), then goimports adds the
// import line:
// import pkg "foo/bar"
// to satisfy uses of pkg.X in the file.
var findImport func(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) = findImportGoPath
// findImportGoPath is the normal implementation of findImport.
// (Some companies have their own internally.)
func findImportGoPath(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) {
if inTests {
testMu.RLock()
defer testMu.RUnlock()
}
// Fast path for the standard library.
// In the common case we hopefully never have to scan the GOPATH, which can
// be slow with moving disks.
if pkg, rename, ok := findImportStdlib(pkgName, symbols); ok {
return pkg, rename, nil
}
if pkgName == "rand" && symbols["Read"] {
// Special-case rand.Read.
//
// If findImportStdlib didn't find it above, don't go
// searching for it, lest it find and pick math/rand
// in GOROOT (new as of Go 1.6)
//
// crypto/rand is the safer choice.
return "", false, nil
}
// TODO(sameer): look at the import lines for other Go files in the
// local directory, since the user is likely to import the same packages
// in the current Go file. Return rename=true when the other Go files
// use a renamed package that's also used in the current file.
// Read all the $GOPATH/src/.goimportsignore files before scanning directories.
populateIgnoreOnce.Do(populateIgnore)
// Start scanning the $GOROOT asynchronously, then run the
// GOPATH scan synchronously if needed, and then wait for the
// $GOROOT to finish.
//
// TODO(bradfitz): run each $GOPATH entry async. But nobody
// really has more than one anyway, so low priority.
scanGoRootOnce.Do(scanGoRoot) // async
if !fileInDir(filename, build.Default.GOROOT) {
scanGoPathOnce.Do(scanGoPath) // blocking
}
<-scanGoRootDone
// Find candidate packages, looking only at their directory names first.
var candidates []*pkg
for _, pkg := range dirScan {
if pkgIsCandidate(filename, pkgName, pkg) {
candidates = append(candidates, pkg)
}
}
// Sort the candidates by their import package length,
// assuming that shorter package names are better than long
// ones. Note that this sorts by the de-vendored name, so
// there's no "penalty" for vendoring.
sort.Sort(byImportPathShortLength(candidates))
if Debug {
for i, pkg := range candidates {
log.Printf("%s candidate %d/%d: %v", pkgName, i+1, len(candidates), pkg.importPathShort)
}
}
// Collect exports for packages with matching names.
done := make(chan struct{}) // closed when we find the answer
defer close(done)
rescv := make([]chan *pkg, len(candidates))
for i := range candidates {
rescv[i] = make(chan *pkg)
}
const maxConcurrentPackageImport = 4
loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
go func() {
for i, pkg := range candidates {
select {
case loadExportsSem <- struct{}{}:
select {
case <-done:
return
default:
}
case <-done:
return
}
pkg := pkg
resc := rescv[i]
go func() {
if inTests {
testMu.RLock()
defer testMu.RUnlock()
}
defer func() { <-loadExportsSem }()
exports := loadExports(pkgName, pkg.dir)
// If it doesn't have the right
// symbols, send nil to mean no match.
for symbol := range symbols {
if !exports[symbol] {
pkg = nil
break
}
}
select {
case resc <- pkg:
case <-done:
}
}()
}
}()
for _, resc := range rescv {
pkg := <-resc
if pkg == nil {
continue
}
// If the package name in the source doesn't match the import path's base,
// return true so the rewriter adds a name (import foo "github.com/bar/go-foo")
needsRename := path.Base(pkg.importPath) != pkgName
return pkg.importPathShort, needsRename, nil
}
return "", false, nil
}
// pkgIsCandidate reports whether pkg is a candidate for satisfying a
// reference to pkgIdent in the file named by filename.
//
// This check is purely lexical and is meant to be as fast as possible
// because it's run over all $GOPATH directories to filter out poor
// candidates in order to limit the CPU and I/O later parsing the
// exports in candidate packages.
//
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
// Check "internal" and "vendor" visibility:
if !canUse(filename, pkg.dir) {
return false
}
// Speed optimization to minimize disk I/O:
// the last two components on disk must contain the
// package name somewhere.
//
// This permits mismatch naming like directory
// "go-foo" being package "foo", or "pkg.v3" being "pkg",
// or directory "google.golang.org/api/cloudbilling/v1"
// being package "cloudbilling", but doesn't
// permit a directory "foo" to be package
// "bar", which is strongly discouraged
// anyway. There's no reason goimports needs
// to be slow just to accommodate that.
lastTwo := lastTwoComponents(pkg.importPathShort)
if strings.Contains(lastTwo, pkgIdent) {
return true
}
if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
if strings.Contains(lastTwo, pkgIdent) {
return true
}
}
return false
}
func hasHyphenOrUpperASCII(s string) bool {
for i := 0; i < len(s); i++ {
b := s[i]
if b == '-' || ('A' <= b && b <= 'Z') {
return true
}
}
return false
}
func lowerASCIIAndRemoveHyphen(s string) (ret string) {
buf := make([]byte, 0, len(s))
for i := 0; i < len(s); i++ {
b := s[i]
switch {
case b == '-':
continue
case 'A' <= b && b <= 'Z':
buf = append(buf, b+('a'-'A'))
default:
buf = append(buf, b)
}
}
return string(buf)
}
// canUse reports whether the package in dir is usable from filename,
// respecting the Go "internal" and "vendor" visibility rules.
func canUse(filename, dir string) bool {
// Fast path check, before any allocations. If it doesn't contain vendor
// or internal, it's not tricky:
// Note that this can false-negative on directories like "notinternal",
// but we check it correctly below. This is just a fast path.
if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
return true
}
dirSlash := filepath.ToSlash(dir)
if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") {
return true
}
// Vendor or internal directory only visible from children of parent.
// That means the path from the current directory to the target directory
// can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
// or bar/vendor or bar/internal.
// After stripping all the leading ../, the only okay place to see vendor or internal
// is at the very beginning of the path.
absfile, err := filepath.Abs(filename)
if err != nil {
return false
}
absdir, err := filepath.Abs(dir)
if err != nil {
return false
}
rel, err := filepath.Rel(absfile, absdir)
if err != nil {
return false
}
relSlash := filepath.ToSlash(rel)
if i := strings.LastIndex(relSlash, "../"); i >= 0 {
relSlash = relSlash[i+len("../"):]
}
return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
}
// lastTwoComponents returns at most the last two path components
// of v, using either / or \ as the path separator.
func lastTwoComponents(v string) string {
nslash := 0
for i := len(v) - 1; i >= 0; i-- {
if v[i] == '/' || v[i] == '\\' {
nslash++
if nslash == 2 {
return v[i:]
}
}
}
return v
}
type visitFn func(node ast.Node) ast.Visitor
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
return fn(node)
}
func findImportStdlib(shortPkg string, symbols map[string]bool) (importPath string, rename, ok bool) {
for symbol := range symbols {
key := shortPkg + "." + symbol
path := stdlib[key]
if path == "" {
if key == "rand.Read" {
continue
}
return "", false, false
}
if importPath != "" && importPath != path {
// Ambiguous. Symbols pointed to different things.
return "", false, false
}
importPath = path
}
if importPath == "" && shortPkg == "rand" && symbols["Read"] {
return "crypto/rand", false, true
}
return importPath, false, importPath != ""
}
// fileInDir reports whether the provided file path looks like
// it's in dir. (without hitting the filesystem)
func fileInDir(file, dir string) bool {
rest := strings.TrimPrefix(file, dir)
if len(rest) == len(file) {
// dir is not a prefix of file.
return false
}
// Check for boundary: either nothing (file == dir), or a slash.
return len(rest) == 0 || rest[0] == '/' || rest[0] == '\\'
}
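A small illustration of two helpers above, assuming LocalPrefix is set to the hypothetical value "k8s.io/kubernetes"; the group numbers follow directly from importToGroup, and the de-vendored path from vendorlessImportPath.

// demoGrouping is an illustrative sketch, not part of the vendored file.
func demoGrouping() {
	LocalPrefix = "k8s.io/kubernetes" // hypothetical local prefix
	fmt.Println(importGroup("fmt"))                       // 0: standard library
	fmt.Println(importGroup("github.com/golang/glog"))    // 1: third party (contains a dot)
	fmt.Println(importGroup("appengine/datastore"))       // 2: appengine
	fmt.Println(importGroup("k8s.io/kubernetes/pkg/api")) // 3: local prefix
	// Vendored paths are compared and reported in de-vendored form:
	fmt.Println(vendorlessImportPath("k8s.io/kubernetes/vendor/github.com/golang/glog"))
	// prints github.com/golang/glog
}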

289
vendor/golang.org/x/tools/imports/imports.go generated vendored Normal file

@ -0,0 +1,289 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run mkstdlib.go
// Package imports implements a Go pretty-printer (like package "go/format")
// that also adds or removes import statements as necessary.
package imports
import (
"bufio"
"bytes"
"fmt"
"go/ast"
"go/format"
"go/parser"
"go/printer"
"go/token"
"io"
"regexp"
"strconv"
"strings"
"golang.org/x/tools/go/ast/astutil"
)
// Options specifies options for processing files.
type Options struct {
Fragment bool // Accept fragment of a source file (no package statement)
AllErrors bool // Report all errors (not just the first 10 on different lines)
Comments bool // Print comments (true if nil *Options provided)
TabIndent bool // Use tabs for indent (true if nil *Options provided)
TabWidth int // Tab width (8 if nil *Options provided)
FormatOnly bool // Disable the insertion and deletion of imports
}
// Process formats and adjusts imports for the provided file.
// If opt is nil the defaults are used.
//
// Note that filename's directory influences which imports can be chosen,
// so it is important that filename be accurate.
// To process data ``as if'' it were in filename, pass the data as a non-nil src.
func Process(filename string, src []byte, opt *Options) ([]byte, error) {
if opt == nil {
opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
}
fileSet := token.NewFileSet()
file, adjust, err := parse(fileSet, filename, src, opt)
if err != nil {
return nil, err
}
if !opt.FormatOnly {
_, err = fixImports(fileSet, file, filename)
if err != nil {
return nil, err
}
}
sortImports(fileSet, file)
imps := astutil.Imports(fileSet, file)
var spacesBefore []string // import paths we need spaces before
for _, impSection := range imps {
// Within each block of contiguous imports, see if any
// import lines are in different group numbers. If so,
// we'll need to put a space between them so it's
// compatible with gofmt.
lastGroup := -1
for _, importSpec := range impSection {
importPath, _ := strconv.Unquote(importSpec.Path.Value)
groupNum := importGroup(importPath)
if groupNum != lastGroup && lastGroup != -1 {
spacesBefore = append(spacesBefore, importPath)
}
lastGroup = groupNum
}
}
printerMode := printer.UseSpaces
if opt.TabIndent {
printerMode |= printer.TabIndent
}
printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
var buf bytes.Buffer
err = printConfig.Fprint(&buf, fileSet, file)
if err != nil {
return nil, err
}
out := buf.Bytes()
if adjust != nil {
out = adjust(src, out)
}
if len(spacesBefore) > 0 {
out = addImportSpaces(bytes.NewReader(out), spacesBefore)
}
out, err = format.Source(out)
if err != nil {
return nil, err
}
return out, nil
}
// parse parses src, which was read from filename,
// as a Go source file or statement list.
func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
parserMode := parser.Mode(0)
if opt.Comments {
parserMode |= parser.ParseComments
}
if opt.AllErrors {
parserMode |= parser.AllErrors
}
// Try as whole source file.
file, err := parser.ParseFile(fset, filename, src, parserMode)
if err == nil {
return file, nil, nil
}
// If the error is that the source file didn't begin with a
// package line and we accept fragmented input, fall through to
// try as a source fragment. Stop and return on any other error.
if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
return nil, nil, err
}
// If this is a declaration list, make it a source file
// by inserting a package clause.
// Insert using a ;, not a newline, so that the line numbers
// in psrc match the ones in src.
psrc := append([]byte("package main;"), src...)
file, err = parser.ParseFile(fset, filename, psrc, parserMode)
if err == nil {
// If a main function exists, we will assume this is a main
// package and leave the file.
if containsMainFunc(file) {
return file, nil, nil
}
adjust := func(orig, src []byte) []byte {
// Remove the package clause.
// Gofmt has turned the ; into a \n.
src = src[len("package main\n"):]
return matchSpace(orig, src)
}
return file, adjust, nil
}
// If the error is that the source file didn't begin with a
// declaration, fall through to try as a statement list.
// Stop and return on any other error.
if !strings.Contains(err.Error(), "expected declaration") {
return nil, nil, err
}
// If this is a statement list, make it a source file
// by inserting a package clause and turning the list
// into a function body. This handles expressions too.
// Insert using a ;, not a newline, so that the line numbers
// in fsrc match the ones in src.
fsrc := append(append([]byte("package p; func _() {"), src...), '}')
file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
if err == nil {
adjust := func(orig, src []byte) []byte {
// Remove the wrapping.
// Gofmt has turned the ; into a \n\n.
src = src[len("package p\n\nfunc _() {"):]
src = src[:len(src)-len("}\n")]
// Gofmt has also indented the function body one level.
// Remove that indent.
src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
return matchSpace(orig, src)
}
return file, adjust, nil
}
// Failed, and out of options.
return nil, nil, err
}
// containsMainFunc checks if a file contains a function declaration with the
// function signature 'func main()'
func containsMainFunc(file *ast.File) bool {
for _, decl := range file.Decls {
if f, ok := decl.(*ast.FuncDecl); ok {
if f.Name.Name != "main" {
continue
}
if len(f.Type.Params.List) != 0 {
continue
}
if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
continue
}
return true
}
}
return false
}
func cutSpace(b []byte) (before, middle, after []byte) {
i := 0
for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') {
i++
}
j := len(b)
for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') {
j--
}
if i <= j {
return b[:i], b[i:j], b[j:]
}
return nil, nil, b[j:]
}
// matchSpace reformats src to use the same space context as orig.
// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
// 2) matchSpace copies the indentation of the first non-blank line in orig
// to every non-blank line in src.
// 3) matchSpace copies the trailing space from orig and uses it in place
// of src's trailing space.
func matchSpace(orig []byte, src []byte) []byte {
before, _, after := cutSpace(orig)
i := bytes.LastIndex(before, []byte{'\n'})
before, indent := before[:i+1], before[i+1:]
_, src, _ = cutSpace(src)
var b bytes.Buffer
b.Write(before)
for len(src) > 0 {
line := src
if i := bytes.IndexByte(line, '\n'); i >= 0 {
line, src = line[:i+1], line[i+1:]
} else {
src = nil
}
if len(line) > 0 && line[0] != '\n' { // not blank
b.Write(indent)
}
b.Write(line)
}
b.Write(after)
return b.Bytes()
}
var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
func addImportSpaces(r io.Reader, breaks []string) []byte {
var out bytes.Buffer
sc := bufio.NewScanner(r)
inImports := false
done := false
for sc.Scan() {
s := sc.Text()
if !inImports && !done && strings.HasPrefix(s, "import") {
inImports = true
}
if inImports && (strings.HasPrefix(s, "var") ||
strings.HasPrefix(s, "func") ||
strings.HasPrefix(s, "const") ||
strings.HasPrefix(s, "type")) {
done = true
inImports = false
}
if inImports && len(breaks) > 0 {
if m := impLine.FindStringSubmatch(s); m != nil {
if m[1] == breaks[0] {
out.WriteByte('\n')
breaks = breaks[1:]
}
}
}
fmt.Fprintln(&out, s)
}
return out.Bytes()
}
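imports.Process is the public entry point this vendoring exists to provide; below is a self-contained sketch of its effect on a source buffer. The file name "example.go" is illustrative, and because src is non-nil the file does not need to exist on disk.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/imports"
)

func main() {
	// Uses fmt without importing it, and imports strings without using it.
	src := []byte(`package demo

import "strings"

func Greet() { fmt.Println("hello") }
`)
	out, err := imports.Process("example.go", src, nil)
	if err != nil {
		log.Fatal(err)
	}
	// The result imports "fmt", drops the unused "strings", and is gofmt-formatted.
	fmt.Printf("%s", out)
}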

173
vendor/golang.org/x/tools/imports/mkindex.go generated vendored Normal file

@ -0,0 +1,173 @@
// +build ignore
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Command mkindex creates the file "pkgindex.go" containing an index of the Go
// standard library. The file is intended to be built as part of the imports
// package, so that the package may be used in environments where a GOROOT is
// not available (such as App Engine).
package main
import (
"bytes"
"fmt"
"go/ast"
"go/build"
"go/format"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"strings"
)
var (
pkgIndex = make(map[string][]pkg)
exports = make(map[string]map[string]bool)
)
func main() {
// Don't use GOPATH.
ctx := build.Default
ctx.GOPATH = ""
// Populate pkgIndex global from GOROOT.
for _, path := range ctx.SrcDirs() {
f, err := os.Open(path)
if err != nil {
log.Print(err)
continue
}
children, err := f.Readdir(-1)
f.Close()
if err != nil {
log.Print(err)
continue
}
for _, child := range children {
if child.IsDir() {
loadPkg(path, child.Name())
}
}
}
// Populate exports global.
for _, ps := range pkgIndex {
for _, p := range ps {
e := loadExports(p.dir)
if e != nil {
exports[p.dir] = e
}
}
}
// Construct source file.
var buf bytes.Buffer
fmt.Fprint(&buf, pkgIndexHead)
fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex)
fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports)
src := buf.Bytes()
// Replace main.pkg type name with pkg.
src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1)
// Replace actual GOROOT with "/go".
src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1)
// Add some line wrapping.
src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1)
src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1)
var err error
src, err = format.Source(src)
if err != nil {
log.Fatal(err)
}
// Write out source file.
err = ioutil.WriteFile("pkgindex.go", src, 0644)
if err != nil {
log.Fatal(err)
}
}
const pkgIndexHead = `package imports
func init() {
pkgIndexOnce.Do(func() {
pkgIndex.m = pkgIndexMaster
})
loadExports = func(dir string) map[string]bool {
return exportsMaster[dir]
}
}
`
type pkg struct {
importpath string // full pkg import path, e.g. "net/http"
dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt"
}
var fset = token.NewFileSet()
func loadPkg(root, importpath string) {
shortName := path.Base(importpath)
if shortName == "testdata" {
return
}
dir := filepath.Join(root, importpath)
pkgIndex[shortName] = append(pkgIndex[shortName], pkg{
importpath: importpath,
dir: dir,
})
pkgDir, err := os.Open(dir)
if err != nil {
return
}
children, err := pkgDir.Readdir(-1)
pkgDir.Close()
if err != nil {
return
}
for _, child := range children {
name := child.Name()
if name == "" {
continue
}
if c := name[0]; c == '.' || ('0' <= c && c <= '9') {
continue
}
if child.IsDir() {
loadPkg(root, filepath.Join(importpath, name))
}
}
}
func loadExports(dir string) map[string]bool {
exports := make(map[string]bool)
buildPkg, err := build.ImportDir(dir, 0)
if err != nil {
if strings.Contains(err.Error(), "no buildable Go source files in") {
return nil
}
log.Printf("could not import %q: %v", dir, err)
return nil
}
for _, file := range buildPkg.GoFiles {
f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
if err != nil {
log.Printf("could not parse %q: %v", file, err)
continue
}
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports[name] = true
}
}
}
return exports
}

104
vendor/golang.org/x/tools/imports/mkstdlib.go generated vendored Normal file

@ -0,0 +1,104 @@
// +build ignore
// mkstdlib generates the zstdlib.go file, containing the Go standard
// library API symbols. It's baked into the binary to avoid scanning
// GOPATH in the common case.
package main
import (
"bufio"
"bytes"
"fmt"
"go/format"
"io"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"regexp"
"sort"
"strings"
)
func mustOpen(name string) io.Reader {
f, err := os.Open(name)
if err != nil {
log.Fatal(err)
}
return f
}
func api(base string) string {
return filepath.Join(os.Getenv("GOROOT"), "api", base)
}
var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`)
func main() {
var buf bytes.Buffer
outf := func(format string, args ...interface{}) {
fmt.Fprintf(&buf, format, args...)
}
outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n")
outf("package imports\n")
outf("var stdlib = map[string]string{\n")
f := io.MultiReader(
mustOpen(api("go1.txt")),
mustOpen(api("go1.1.txt")),
mustOpen(api("go1.2.txt")),
mustOpen(api("go1.3.txt")),
mustOpen(api("go1.4.txt")),
mustOpen(api("go1.5.txt")),
mustOpen(api("go1.6.txt")),
mustOpen(api("go1.7.txt")),
mustOpen(api("go1.8.txt")),
)
sc := bufio.NewScanner(f)
fullImport := map[string]string{} // "zip.NewReader" => "archive/zip"
ambiguous := map[string]bool{}
var keys []string
for sc.Scan() {
l := sc.Text()
has := func(v string) bool { return strings.Contains(l, v) }
if has("struct, ") || has("interface, ") || has(", method (") {
continue
}
if m := sym.FindStringSubmatch(l); m != nil {
full := m[1]
key := path.Base(full) + "." + m[2]
if exist, ok := fullImport[key]; ok {
if exist != full {
ambiguous[key] = true
}
} else {
fullImport[key] = full
keys = append(keys, key)
}
}
}
if err := sc.Err(); err != nil {
log.Fatal(err)
}
sort.Strings(keys)
for _, key := range keys {
if ambiguous[key] {
outf("\t// %q is ambiguous\n", key)
} else {
outf("\t%q: %q,\n", key, fullImport[key])
}
}
outf("\n")
for _, sym := range [...]string{"Alignof", "ArbitraryType", "Offsetof", "Pointer", "Sizeof"} {
outf("\t%q: %q,\n", "unsafe."+sym, "unsafe")
}
outf("}\n")
fmtbuf, err := format.Source(buf.Bytes())
if err != nil {
log.Fatal(err)
}
err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666)
if err != nil {
log.Fatal(err)
}
}
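The zstdlib.go file this program generates (its diff is suppressed further below) is just the map described above; a few representative entries, shown for orientation:

var stdlib = map[string]string{
	"bufio.NewReader": "bufio",
	"fmt.Println":     "fmt",
	"zip.NewReader":   "archive/zip",
	// ...
	"unsafe.Pointer": "unsafe",
}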

212
vendor/golang.org/x/tools/imports/sortimports.go generated vendored Normal file

@ -0,0 +1,212 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Hacked up copy of go/ast/import.go
package imports
import (
"go/ast"
"go/token"
"sort"
"strconv"
)
// sortImports sorts runs of consecutive import lines in import blocks in f.
// It also removes duplicate imports when it is possible to do so without data loss.
func sortImports(fset *token.FileSet, f *ast.File) {
for i, d := range f.Decls {
d, ok := d.(*ast.GenDecl)
if !ok || d.Tok != token.IMPORT {
// Not an import declaration, so we're done.
// Imports are always first.
break
}
if len(d.Specs) == 0 {
// Empty import block, remove it.
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
}
if !d.Lparen.IsValid() {
// Not a block: sorted by default.
continue
}
// Identify and sort runs of specs on successive lines.
i := 0
specs := d.Specs[:0]
for j, s := range d.Specs {
if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
// j begins a new run. End this one.
specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
i = j
}
}
specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
d.Specs = specs
// Deduping can leave a blank line before the rparen; clean that up.
if len(d.Specs) > 0 {
lastSpec := d.Specs[len(d.Specs)-1]
lastLine := fset.Position(lastSpec.Pos()).Line
if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
fset.File(d.Rparen).MergeLine(rParenLine - 1)
}
}
}
}
func importPath(s ast.Spec) string {
t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
if err == nil {
return t
}
return ""
}
func importName(s ast.Spec) string {
n := s.(*ast.ImportSpec).Name
if n == nil {
return ""
}
return n.Name
}
func importComment(s ast.Spec) string {
c := s.(*ast.ImportSpec).Comment
if c == nil {
return ""
}
return c.Text()
}
// collapse indicates whether prev may be removed, leaving only next.
func collapse(prev, next ast.Spec) bool {
if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
return false
}
return prev.(*ast.ImportSpec).Comment == nil
}
type posSpan struct {
Start token.Pos
End token.Pos
}
func sortSpecs(fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
// Can't short-circuit here even if specs are already sorted,
// since they might yet need deduplication.
// A lone import, however, may be safely ignored.
if len(specs) <= 1 {
return specs
}
// Record positions for specs.
pos := make([]posSpan, len(specs))
for i, s := range specs {
pos[i] = posSpan{s.Pos(), s.End()}
}
// Identify comments in this range.
// Any comment from pos[0].Start to the final line counts.
lastLine := fset.Position(pos[len(pos)-1].End).Line
cstart := len(f.Comments)
cend := len(f.Comments)
for i, g := range f.Comments {
if g.Pos() < pos[0].Start {
continue
}
if i < cstart {
cstart = i
}
if fset.Position(g.End()).Line > lastLine {
cend = i
break
}
}
comments := f.Comments[cstart:cend]
// Assign each comment to the import spec preceding it.
importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
specIndex := 0
for _, g := range comments {
for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
specIndex++
}
s := specs[specIndex].(*ast.ImportSpec)
importComment[s] = append(importComment[s], g)
}
// Sort the import specs by import path.
// Remove duplicates, when possible without data loss.
// Reassign the import paths to have the same position sequence.
// Reassign each comment to abut the end of its spec.
// Sort the comments by new position.
sort.Sort(byImportSpec(specs))
// Dedup. Thanks to our sorting, we can just consider
// adjacent pairs of imports.
deduped := specs[:0]
for i, s := range specs {
if i == len(specs)-1 || !collapse(s, specs[i+1]) {
deduped = append(deduped, s)
} else {
p := s.Pos()
fset.File(p).MergeLine(fset.Position(p).Line)
}
}
specs = deduped
// Fix up comment positions
for i, s := range specs {
s := s.(*ast.ImportSpec)
if s.Name != nil {
s.Name.NamePos = pos[i].Start
}
s.Path.ValuePos = pos[i].Start
s.EndPos = pos[i].End
for _, g := range importComment[s] {
for _, c := range g.List {
c.Slash = pos[i].End
}
}
}
sort.Sort(byCommentPos(comments))
return specs
}
type byImportSpec []ast.Spec // slice of *ast.ImportSpec
func (x byImportSpec) Len() int { return len(x) }
func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x byImportSpec) Less(i, j int) bool {
ipath := importPath(x[i])
jpath := importPath(x[j])
igroup := importGroup(ipath)
jgroup := importGroup(jpath)
if igroup != jgroup {
return igroup < jgroup
}
if ipath != jpath {
return ipath < jpath
}
iname := importName(x[i])
jname := importName(x[j])
if iname != jname {
return iname < jname
}
return importComment(x[i]) < importComment(x[j])
}
type byCommentPos []*ast.CommentGroup
func (x byCommentPos) Len() int { return len(x) }
func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
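A before/after illustration of the sorting and deduplication above, assuming a single block of imports on consecutive lines with no comment attached to the duplicate:

// Before sortImports:
import (
	"strings"
	"fmt"
	"fmt"
)

// After sortImports (sorted within the group, duplicate collapsed):
import (
	"fmt"
	"strings"
)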

9376
vendor/golang.org/x/tools/imports/zstdlib.go generated vendored Normal file

File diff suppressed because it is too large


@ -16,6 +16,7 @@ go_library(
visibility = ["//visibility:public"],
deps = [
"//vendor/github.com/golang/glog:go_default_library",
"//vendor/golang.org/x/tools/imports:go_default_library",
"//vendor/k8s.io/gengo/namer:go_default_library",
"//vendor/k8s.io/gengo/parser:go_default_library",
"//vendor/k8s.io/gengo/types:go_default_library",


@ -19,13 +19,13 @@ package generator
import (
"bytes"
"fmt"
"go/format"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"golang.org/x/tools/imports"
"k8s.io/gengo/namer"
"k8s.io/gengo/types"
@ -131,7 +131,6 @@ func assembleGolangFile(w io.Writer, f *File) {
if len(f.Imports) > 0 {
fmt.Fprint(w, "import (\n")
// TODO: sort imports like goimports does.
for i := range f.Imports {
if strings.Contains(i, "\"") {
// they included quotes, or are using the
@ -159,9 +158,13 @@ func assembleGolangFile(w io.Writer, f *File) {
w.Write(f.Body.Bytes())
}
func importsWrapper(src []byte) ([]byte, error) {
return imports.Process("", src, nil)
}
func NewGolangFile() *DefaultFileType {
return &DefaultFileType{
Format: format.Source,
Format: importsWrapper,
Assemble: assembleGolangFile,
}
}
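The swap from format.Source to importsWrapper means assembled files are run through goimports-style processing instead of plain gofmt, so imports a generator forgets to record are added and unused ones are pruned. A minimal sketch of that behavior, assuming it runs in this package where importsWrapper is defined (the demo source and function name are illustrative):

func demoImportsWrapper() {
	// Generated body references fmt, but the generator never recorded the import.
	src := []byte("package demo\n\nvar _ = fmt.Sprintf(\"%d\", 42)\n")
	out, err := importsWrapper(src) // imports.Process adds the missing "fmt" import
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s", out)
}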