1
0
mirror of https://github.com/prometheus-community/postgres_exporter.git synced 2025-08-06 17:22:43 +03:00

Add cross-compilation Makefile targets and tar-based releases.

Revamp the build system to be more in line with other Prometheus exporters.
Notably add Darwin and Windows build targets, and add support for releases
using tar files.
This commit is contained in:
Will Rouesnel
2017-11-30 03:15:53 +11:00
parent 61b93a17a6
commit 5b9fea01ee
98 changed files with 10599 additions and 1487 deletions

2
.gitignore vendored
View File

@@ -9,5 +9,7 @@ postgres_exporter_integration_test
cover.out cover.out
cover.*.out cover.*.out
.coverage .coverage
bin
release
*.prom *.prom
.metrics.*.*.prom .metrics.*.*.prom

View File

@@ -18,6 +18,7 @@ script:
- make docker - make docker
- make test-integration - make test-integration
- make cover.out - make cover.out
- make release
- $HOME/gopath/bin/goveralls -coverprofile=cover.out -service=travis-ci - $HOME/gopath/bin/goveralls -coverprofile=cover.out -service=travis-ci
after_success: after_success:
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS - docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
@@ -38,7 +39,8 @@ deploy:
provider: releases provider: releases
api_key: api_key:
secure: rwlge/Rs3wnWyfKRhD9fd5GviVe0foYUp20DY3AjKdDjhtwScA1EeR9QHOkB3raze52en0+KkpqlLCWbt3q4CRT7+ku1DNKhd6VWALdTZ1RPJYvNlU6CKJdRnWUJsECmSBsShXlbiYR8axqNVedzFPFGKzS9gYlFN6rr7pez/JZhxqucopZ6I+TkRHMELrFXyQK7/Y2bNRCLC4a+rGsjKeLLtYXbRXCmS0G4BSJEBRk7d69fIRzBApCMfrcLftgHzPuPth616yyUusQSCQYvaZ5tlwrPP8/E0wG3SVJVeDCMuDOSBZ9M6vNzR8W8VR/hxQamegn1OQgC5kNOaLZCTcJ5xguRouqb+FNFBqrd/Zi6vESo7RiVLULawzwxkh9sIPa3WZYDb3VK/Z/cpggUeR7wAu0S5ZYEvJHRefIZpqofZEHzDE3Blqp5yErz05e/zmjpd6HHK3f/UHmRRYfbulkvGT3aL/dlq5GcFvuxVC/vTL2VPvg9cGbqtf7PakC5IhoHpDs35tOyLxifOBLHvkwtGSxEfsCohIG8Hz2XFD83EsxgOiKSXVPLNd6yxjdqZj7OeAKFFU3bzGndnRbDIXaf987IN1imgUtP6wegfImoRStqxN4gEwwIMFsZCF86Ug4eLhlajLbWhudriDxDPBM/F9950aVxLwmWh9l5cRI= secure: rwlge/Rs3wnWyfKRhD9fd5GviVe0foYUp20DY3AjKdDjhtwScA1EeR9QHOkB3raze52en0+KkpqlLCWbt3q4CRT7+ku1DNKhd6VWALdTZ1RPJYvNlU6CKJdRnWUJsECmSBsShXlbiYR8axqNVedzFPFGKzS9gYlFN6rr7pez/JZhxqucopZ6I+TkRHMELrFXyQK7/Y2bNRCLC4a+rGsjKeLLtYXbRXCmS0G4BSJEBRk7d69fIRzBApCMfrcLftgHzPuPth616yyUusQSCQYvaZ5tlwrPP8/E0wG3SVJVeDCMuDOSBZ9M6vNzR8W8VR/hxQamegn1OQgC5kNOaLZCTcJ5xguRouqb+FNFBqrd/Zi6vESo7RiVLULawzwxkh9sIPa3WZYDb3VK/Z/cpggUeR7wAu0S5ZYEvJHRefIZpqofZEHzDE3Blqp5yErz05e/zmjpd6HHK3f/UHmRRYfbulkvGT3aL/dlq5GcFvuxVC/vTL2VPvg9cGbqtf7PakC5IhoHpDs35tOyLxifOBLHvkwtGSxEfsCohIG8Hz2XFD83EsxgOiKSXVPLNd6yxjdqZj7OeAKFFU3bzGndnRbDIXaf987IN1imgUtP6wegfImoRStqxN4gEwwIMFsZCF86Ug4eLhlajLbWhudriDxDPBM/F9950aVxLwmWh9l5cRI=
file: postgres_exporter file_glob: true
file: release/*
on: on:
tags: true tags: true
branch: master branch: master

View File

@@ -1,6 +1,8 @@
FROM scratch FROM scratch
COPY postgres_exporter /postgres_exporter ARG binary
COPY $binary /postgres_exporter
EXPOSE 9187 EXPOSE 9187

108
Makefile
View File

@@ -1,36 +1,78 @@
COVERDIR = .coverage COVERDIR = .coverage
TOOLDIR = tools TOOLDIR = tools
BINDIR = bin
RELEASEDIR = release
GO_SRC := $(shell find . -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' ) DIRS = $(BINDIR) $(RELEASEDIR)
GO_DIRS := $(shell find . -type d -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' )
GO_SRC := $(shell find . -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' ! -path 'bin/*' ! -path 'release/*' )
GO_DIRS := $(shell find . -type d -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' ! -path 'bin/*' ! -path 'release/*' )
GO_PKGS := $(shell go list ./... | grep -v '/vendor/') GO_PKGS := $(shell go list ./... | grep -v '/vendor/')
CONTAINER_NAME ?= wrouesnel/postgres_exporter:latest CONTAINER_NAME ?= wrouesnel/postgres_exporter:latest
VERSION ?= $(shell git describe --dirty) BINARY := $(shell basename $(shell pwd))
VERSION ?= $(shell git describe --dirty 2>/dev/null)
VERSION_SHORT ?= $(shell git describe --abbrev=0 2>/dev/null)
ifeq ($(VERSION),)
VERSION := v0.0.0
endif
ifeq ($(VERSION_SHORT),)
VERSION_SHORT := v0.0.0
endif
# By default this list is filtered down to some common platforms.
platforms := $(subst /,-,$(shell go tool dist list | grep -e linux -e windows -e darwin | grep -e 386 -e amd64))
PLATFORM_BINS := $(patsubst %,$(BINDIR)/$(BINARY)_$(VERSION_SHORT)_%/$(BINARY),$(platforms))
PLATFORM_DIRS := $(patsubst %,$(BINDIR)/$(BINARY)_$(VERSION_SHORT)_%,$(platforms))
PLATFORM_TARS := $(patsubst %,$(RELEASEDIR)/$(BINARY)_$(VERSION_SHORT)_%.tar.gz,$(platforms))
# These are evaluated on use, and so will have the correct values in the build
# rule (https://vic.demuzere.be/articles/golang-makefile-crosscompile/)
PLATFORMS_TEMP = $(subst -, ,$(patsubst $(BINDIR)/$(BINARY)_$(VERSION_SHORT)_%/$(BINARY),%,$@))
GOOS = $(word 1, $(PLATFORMS_TEMP))
GOARCH = $(word 2, $(PLATFORMS_TEMP))
CURRENT_PLATFORM := $(BINDIR)/$(BINARY)_$(VERSION_SHORT)_$(shell go env GOOS)-$(shell go env GOARCH)/$(BINARY)
CONCURRENT_LINTERS ?=
ifeq ($(CONCURRENT_LINTERS),)
CONCURRENT_LINTERS = $(shell gometalinter --help | grep -o 'concurrency=\w*' | cut -d= -f2 | cut -d' ' -f1)
endif
CONCURRENT_LINTERS ?= $(shell cat /proc/cpuinfo | grep processor | wc -l)
LINTER_DEADLINE ?= 30s LINTER_DEADLINE ?= 30s
$(shell mkdir -p $(DIRS))
export PATH := $(TOOLDIR)/bin:$(PATH) export PATH := $(TOOLDIR)/bin:$(PATH)
SHELL := env PATH=$(PATH) /bin/bash SHELL := env PATH=$(PATH) /bin/bash
all: style lint test postgres_exporter all: style lint test binary
# Cross compilation (e.g. if you are on a Mac) binary: $(BINARY)
cross: docker-build docker
# Simple go build $(BINARY): $(CURRENT_PLATFORM)
postgres_exporter: $(GO_SRC) ln -sf $< $@
CGO_ENABLED=0 go build -a -ldflags "-extldflags '-static' -X main.Version=$(VERSION)" -o postgres_exporter .
postgres_exporter_integration_test: $(GO_SRC) $(PLATFORM_BINS): $(GO_SRC)
CGO_ENABLED=0 go test -c -tags integration \ CGO_ENABLED=0 GOOS=$(GOOS) GOARCH=$(GOARCH) go build -a \
-a -ldflags "-extldflags '-static' -X main.Version=$(VERSION)" -o postgres_exporter_integration_test -cover -covermode count . -ldflags "-extldflags '-static' -X main.Version=$(VERSION)" \
-o $@ .
$(PLATFORM_DIRS): $(PLATFORM_BINS)
$(PLATFORM_TARS): $(RELEASEDIR)/%.tar.gz : $(BINDIR)/%
tar -czf $@ -C $(BINDIR) $$(basename $<)
release-bin: $(PLATFORM_BINS)
release: $(PLATFORM_TARS)
# Take a go build and turn it into a minimal container # Take a go build and turn it into a minimal container
docker: postgres_exporter docker: $(CURRENT_PLATFORM)
docker build -t $(CONTAINER_NAME) . docker build --build-arg=binary=$(CURRENT_PLATFORM) -t $(CONTAINER_NAME) .
style: tools style: tools
gometalinter --disable-all --enable=gofmt --vendor gometalinter --disable-all --enable=gofmt --vendor
@@ -42,14 +84,17 @@ lint: tools
fmt: tools fmt: tools
gofmt -s -w $(GO_SRC) gofmt -s -w $(GO_SRC)
run-tests: tools postgres_exporter_integration_test: $(GO_SRC)
mkdir -p $(COVERDIR) CGO_ENABLED=0 go test -c -tags integration \
rm -f $(COVERDIR)/* -a -ldflags "-extldflags '-static' -X main.Version=$(VERSION)" \
-o postgres_exporter_integration_test -cover -covermode count .
test: tools
@mkdir -p $(COVERDIR)
@rm -f $(COVERDIR)/*
for pkg in $(GO_PKGS) ; do \ for pkg in $(GO_PKGS) ; do \
go test -v -covermode count -coverprofile=$(COVERDIR)/$$(echo $$pkg | tr '/' '-').out $$pkg || exit 1 ; \ go test -v -covermode count -coverprofile=$(COVERDIR)/$$(echo $$pkg | tr '/' '-').out $$pkg || exit 1 ; \
done done
test: run-tests
gocovmerge $(shell find $(COVERDIR) -name '*.out') > cover.test.out gocovmerge $(shell find $(COVERDIR) -name '*.out') > cover.test.out
test-integration: postgres_exporter postgres_exporter_integration_test test-integration: postgres_exporter postgres_exporter_integration_test
@@ -58,24 +103,13 @@ test-integration: postgres_exporter postgres_exporter_integration_test
cover.out: tools cover.out: tools
gocovmerge cover.*.out > cover.out gocovmerge cover.*.out > cover.out
# Do a self-contained docker build - we pull the official upstream container clean:
# and do a self-contained build. [ ! -z $(BINDIR) ] && [ -e $(BINDIR) ] && find $(BINDIR) -print -delete || /bin/true
docker-build: [ ! -z $(COVERDIR) ] && [ -e $(COVERDIR) ] && find $(COVERDIR) -print -delete || /bin/true
docker run -v $(shell pwd):/go/src/github.com/wrouesnel/postgres_exporter \ [ ! -z $(RELEASEDIR) ] && [ -e $(RELEASEDIR) ] && find $(RELEASEDIR) -print -delete || /bin/true
-v $(shell pwd):/real_src \ rm -f postgres_exporter postgres_exporter_integration_test
-e SHELL_UID=$(shell id -u) -e SHELL_GID=$(shell id -g) \
-w /go/src/github.com/wrouesnel/postgres_exporter \
golang:1.9-wheezy \
/bin/bash -c "make >&2 && chown $$SHELL_UID:$$SHELL_GID ./postgres_exporter"
docker build -t $(CONTAINER_NAME) .
push:
docker push $(CONTAINER_NAME)
tools: tools:
$(MAKE) -C $(TOOLDIR) $(MAKE) -C $(TOOLDIR)
clean: .PHONY: tools style fmt test all release binary clean
rm -rf postgres_exporter postgres_exporter_integration_test $(COVERDIR)
.PHONY: tools docker-build docker lint fmt test vet push cross clean

View File

@@ -126,3 +126,11 @@ GRANT SELECT ON postgres_exporter.pg_stat_replication TO postgres_exporter;
> ``` > ```
> DATA_SOURCE_NAME=postgresql://postgres_exporter:password@localhost:5432/postgres?sslmode=disable > DATA_SOURCE_NAME=postgresql://postgres_exporter:password@localhost:5432/postgres?sslmode=disable
> ``` > ```
# Hacking
* The build system is currently only supported for Linux-like platforms. It
depends on GNU Make.
* To build a copy for your current architecture run `make binary` or just `make`.
This will create a symlink to the just-built binary in the root directory.
* To build release tarballs run `make release`.

View File

@@ -9,13 +9,15 @@ SHELL := env PATH=$(PATH) /bin/bash
THIS_FILE := $(lastword $(MAKEFILE_LIST)) THIS_FILE := $(lastword $(MAKEFILE_LIST))
# This function is used to get the linters used by metalinter # This function is used to get the linters used by metalinter
get_metalinters := gometalinter --help | grep -oP ' \w+ \(.+\)' | tr -s ' ' | cut -d' ' -f3 | grep -oP '[^()]+' get_metalinters := gometalinter --help | grep -oP '\s+\w+:\s*\(.+\)' | tr -s ' ' | cut -d' ' -f3 | grep -oP '[^()]+'
# This is a list of external tools we want to vendor
TOOL_SRCS := github.com/kardianos/govendor \ TOOL_SRCS := github.com/kardianos/govendor \
github.com/wadey/gocovmerge \ github.com/wadey/gocovmerge \
github.com/mattn/goveralls \ github.com/mattn/goveralls \
github.com/alecthomas/gometalinter github.com/alecthomas/gometalinter
# This is populated by imported dependencies from gometalinter
METATOOL_SRCS := METATOOL_SRCS :=
GO_SRC := $(shell find $(SOURCEDIR) -name '*.go') GO_SRC := $(shell find $(SOURCEDIR) -name '*.go')

23
tools/vendor/github.com/GoASTScanner/gas/Dockerfile generated vendored Normal file
View File

@@ -0,0 +1,23 @@
# Docker version must be 17.05 or higher to allow multistage build
# See build and run instructions in README.md
# Builds Gas for utilization
FROM golang:1.8.1-alpine as builder
ENV workspace /go/src/github.com/GoASTScanner/gas
ENV GOPATH /go
COPY . $workspace
WORKDIR $workspace
RUN go vet $(go list ./... | grep -v /vendor/)
RUN CGO_ENABLED=0 go build -o gas .
########################################################
# Runs Gas on all Go files in the current directory when
# 'docker run' command in README is given
FROM alpine:3.6
COPY --from=builder /go/src/github.com/GoASTScanner/gas/gas /
# Mounted directory should be placed into the workdir
CMD /gas $(find . -path ./vendor -prune -o -type f -name "*.go")

View File

@@ -112,3 +112,20 @@ file. The output format is controlled by the '-fmt' flag, and the output file is
# Write output in json format to results.json # Write output in json format to results.json
$ gas -fmt=json -out=results.json *.go $ gas -fmt=json -out=results.json *.go
``` ```
### Docker container
A Dockerfile is included with the Gas source code to provide a container that
allows users to easily run Gas on their code. It builds Gas, then runs it on
all Go files in your current directory. Use the following commands to build
and run locally:
To build: (run command in cloned Gas source code directory)
docker build --build-arg http_proxy --build-arg https_proxy
--build-arg no_proxy -t goastscanner/gas:latest .
To run: (run command in desired directory with Go files)
docker run -v $PWD:$PWD --workdir $PWD goastscanner/gas:latest
Note: Docker version 17.05 or later is required (to permit multistage build).
```

View File

@@ -19,7 +19,6 @@
- [2. Analyse the debug output](#2-analyse-the-debug-output) - [2. Analyse the debug output](#2-analyse-the-debug-output)
- [3. Report an issue.](#3-report-an-issue) - [3. Report an issue.](#3-report-an-issue)
- [How do I filter issues between two git refs?](#how-do-i-filter-issues-between-two-git-refs) - [How do I filter issues between two git refs?](#how-do-i-filter-issues-between-two-git-refs)
- [Details](#details)
- [Checkstyle XML format](#checkstyle-xml-format) - [Checkstyle XML format](#checkstyle-xml-format)
<!-- /MarkdownTOC --> <!-- /MarkdownTOC -->
@@ -57,12 +56,13 @@ It is intended for use with editor/IDE integration.
- [go vet](https://golang.org/cmd/vet/) - Reports potential errors that otherwise compile. - [go vet](https://golang.org/cmd/vet/) - Reports potential errors that otherwise compile.
- [go tool vet --shadow](https://golang.org/cmd/vet/#hdr-Shadowed_variables) - Reports variables that may have been unintentionally shadowed. - [go tool vet --shadow](https://golang.org/cmd/vet/#hdr-Shadowed_variables) - Reports variables that may have been unintentionally shadowed.
- [gotype](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis similar to the Go compiler. - [gotype](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis similar to the Go compiler.
- [gotype -x](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis in external test packages (similar to the Go compiler).
- [deadcode](https://github.com/tsenart/deadcode) - Finds unused code. - [deadcode](https://github.com/tsenart/deadcode) - Finds unused code.
- [gocyclo](https://github.com/alecthomas/gocyclo) - Computes the cyclomatic complexity of functions. - [gocyclo](https://github.com/alecthomas/gocyclo) - Computes the cyclomatic complexity of functions.
- [golint](https://github.com/golang/lint) - Google's (mostly stylistic) linter. - [golint](https://github.com/golang/lint) - Google's (mostly stylistic) linter.
- [varcheck](https://github.com/opennota/check) - Find unused global variables and constants. - [varcheck](https://github.com/opennota/check) - Find unused global variables and constants.
- [structcheck](https://github.com/opennota/check) - Find unused struct fields. - [structcheck](https://github.com/opennota/check) - Find unused struct fields.
- [aligncheck](https://github.com/opennota/check) - Warn about un-optimally aligned structures. - [maligned](https://github.com/mdempsky/maligned) - Detect structs that would take less memory if their fields were sorted.
- [errcheck](https://github.com/kisielk/errcheck) - Check that error return values are used. - [errcheck](https://github.com/kisielk/errcheck) - Check that error return values are used.
- [megacheck](https://github.com/dominikh/go-tools/tree/master/cmd/megacheck) - Run staticcheck, gosimple and unused, sharing work. - [megacheck](https://github.com/dominikh/go-tools/tree/master/cmd/megacheck) - Run staticcheck, gosimple and unused, sharing work.
- [dupl](https://github.com/mibk/dupl) - Reports potentially duplicated code. - [dupl](https://github.com/mibk/dupl) - Reports potentially duplicated code.
@@ -81,6 +81,7 @@ Disabled by default (enable with `--enable=<linter>`):
- [gosimple](https://github.com/dominikh/go-tools/tree/master/cmd/gosimple) - Report simplifications in code. - [gosimple](https://github.com/dominikh/go-tools/tree/master/cmd/gosimple) - Report simplifications in code.
- [lll](https://github.com/walle/lll) - Report long lines (see `--line-length=N`). - [lll](https://github.com/walle/lll) - Report long lines (see `--line-length=N`).
- [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words. - [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words.
- [nakedret](https://github.com/alexkohler/nakedret) - Finds naked returns.
- [unparam](https://github.com/mvdan/unparam) - Find unused function parameters. - [unparam](https://github.com/mvdan/unparam) - Find unused function parameters.
- [unused](https://github.com/dominikh/go-tools/tree/master/cmd/unused) - Find unused variables. - [unused](https://github.com/dominikh/go-tools/tree/master/cmd/unused) - Find unused variables.
- [safesql](https://github.com/stripe/safesql) - Finds potential SQL injection vulnerabilities. - [safesql](https://github.com/stripe/safesql) - Finds potential SQL injection vulnerabilities.
@@ -91,14 +92,15 @@ Additional linters can be added through the command line with `--linter=NAME:COM
## Configuration file ## Configuration file
gometalinter now supports a JSON configuration file which can be loaded via gometalinter now supports a JSON configuration file which can be loaded via
`--config=<file>`. The format of this file is determined by the Config struct `--config=<file>`. The format of this file is determined by the `Config` struct
in `config.go`. in [config.go](https://github.com/alecthomas/gometalinter/blob/master/config.go).
The configuration file mostly corresponds to command-line flags, with the following exceptions: The configuration file mostly corresponds to command-line flags, with the following exceptions:
- Linters defined in the configuration file will overlay existing definitions, not replace them. - Linters defined in the configuration file will overlay existing definitions, not replace them.
- "Enable" defines the exact set of linters that will be enabled (default - "Enable" defines the exact set of linters that will be enabled (default
linters are disabled). linters are disabled). `--help` displays the list of default linters with the exact names
you must use.
Here is an example configuration file: Here is an example configuration file:
@@ -108,6 +110,34 @@ Here is an example configuration file:
} }
``` ```
### Adding Custom linters
Linters can be added and customized from the config file using the `Linters` field.
Linters supports the following fields:
* `Command` - the path to the linter binary and any default arguments
* `Pattern` - a regular expression used to parse the linter output
* `IsFast` - if the linter should be run when the `--fast` flag is used
* `PartitionStrategy` - how paths args should be passed to the linter command:
* `directories` - call the linter once with a list of all the directories
* `files` - call the linter once with a list of all the files
* `packages` - call the linter once with a list of all the package paths
* `files-by-package` - call the linter once per package with a list of the
files in the package.
* `single-directory` - call the linter once per directory
The config for default linters can be overridden by using the name of the
linter.
Additional linters can be configured via the command line using the format
`NAME:COMMAND:PATTERN`.
Example:
```
$ gometalinter --linter='vet:go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf:PATH:LINE:MESSAGE' .
```
## Installing ## Installing
There are two options for installing gometalinter. There are two options for installing gometalinter.
@@ -171,7 +201,8 @@ Install all known linters:
$ gometalinter --install $ gometalinter --install
Installing: Installing:
structcheck structcheck
aligncheck maligned
nakedret
deadcode deadcode
gocyclo gocyclo
ineffassign ineffassign
@@ -308,21 +339,6 @@ gometalinter |& revgrep master # Show issues between master and HEAD (or
gometalinter |& revgrep origin/master # Show issues that haven't been pushed. gometalinter |& revgrep origin/master # Show issues that haven't been pushed.
``` ```
## Details
Additional linters can be configured via the command line:
```
$ gometalinter --linter='vet:go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf {path}:PATH:LINE:MESSAGE' .
stutter.go:21:15:warning: error return value not checked (defer a.Close()) (errcheck)
stutter.go:22:15:warning: error return value not checked (defer a.Close()) (errcheck)
stutter.go:27:6:warning: error return value not checked (doit() // test for errcheck) (errcheck)
stutter.go:9::warning: unused global variable unusedGlobal (varcheck)
stutter.go:13::warning: unused struct field MyStruct.Unused (structcheck)
stutter.go:12:6:warning: exported type MyStruct should have comment or be unexported (golint)
stutter.go:16:6:warning: exported type PublicUndocumented should have comment or be unexported (deadcode)
```
## Checkstyle XML format ## Checkstyle XML format
`gometalinter` supports [checkstyle](http://checkstyle.sourceforge.net/) `gometalinter` supports [checkstyle](http://checkstyle.sourceforge.net/)

View File

@@ -5,27 +5,21 @@ import (
"strings" "strings"
) )
type ( type issueKey struct {
issueKey struct {
path string path string
line, col int line, col int
message string message string
} }
multiIssue struct { type multiIssue struct {
*Issue *Issue
linterNames []string linterNames []string
} }
)
func maybeAggregateIssues(issues chan *Issue) chan *Issue { // AggregateIssueChan reads issues from a channel, aggregates issues which have
if !config.Aggregate { // the same file, line, vol, and message, and returns aggregated issues on
return issues // a new channel.
} func AggregateIssueChan(issues chan *Issue) chan *Issue {
return aggregateIssues(issues)
}
func aggregateIssues(issues chan *Issue) chan *Issue {
out := make(chan *Issue, 1000000) out := make(chan *Issue, 1000000)
issueMap := make(map[issueKey]*multiIssue) issueMap := make(map[issueKey]*multiIssue)
go func() { go func() {

View File

@@ -4,7 +4,7 @@ import (
"encoding/xml" "encoding/xml"
"fmt" "fmt"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
type checkstyleOutput struct { type checkstyleOutput struct {

View File

@@ -8,12 +8,12 @@ import (
) )
// Config for gometalinter. This can be loaded from a JSON file with --config. // Config for gometalinter. This can be loaded from a JSON file with --config.
type Config struct { // nolint: aligncheck type Config struct { // nolint: maligned
// A map of linter name to "<command>:<pattern>". // A map from linter name -> <LinterConfig|string>.
// //
// <command> should always include {path} as the target directory to execute. Globs in <command> // For backwards compatibility, the value stored in the JSON blob can also
// are expanded by gometalinter (not by the shell). // be a string of the form "<command>:<pattern>".
Linters map[string]string Linters map[string]StringOrLinterConfig
// The set of linters that should be enabled. // The set of linters that should be enabled.
Enable []string Enable []string
@@ -51,6 +51,35 @@ type Config struct { // nolint: aligncheck
EnableGC bool EnableGC bool
Aggregate bool Aggregate bool
EnableAll bool EnableAll bool
// Warn if a nolint directive was never matched to a linter issue
WarnUnmatchedDirective bool
formatTemplate *template.Template
}
type StringOrLinterConfig LinterConfig
func (c *StringOrLinterConfig) UnmarshalJSON(raw []byte) error {
var linterConfig LinterConfig
// first try to un-marshall directly into struct
origErr := json.Unmarshal(raw, &linterConfig)
if origErr == nil {
*c = StringOrLinterConfig(linterConfig)
return nil
}
// i.e. bytes didn't represent the struct, treat them as a string
var linterSpec string
if err := json.Unmarshal(raw, &linterSpec); err != nil {
return origErr
}
linter, err := parseLinterConfigSpec("", linterSpec)
if err != nil {
return err
}
*c = StringOrLinterConfig(linter)
return nil
} }
type jsonDuration time.Duration type jsonDuration time.Duration
@@ -70,17 +99,16 @@ func (td *jsonDuration) Duration() time.Duration {
return time.Duration(*td) return time.Duration(*td)
} }
// TODO: should be a field on Config struct
var formatTemplate = &template.Template{}
var sortKeys = []string{"none", "path", "line", "column", "severity", "message", "linter"} var sortKeys = []string{"none", "path", "line", "column", "severity", "message", "linter"}
// Configuration defaults. // Configuration defaults.
var config = &Config{ var config = &Config{
Format: "{{.Path}}:{{.Line}}:{{if .Col}}{{.Col}}{{end}}:{{.Severity}}: {{.Message}} ({{.Linter}})", Format: DefaultIssueFormat,
Linters: map[string]StringOrLinterConfig{},
Severity: map[string]string{ Severity: map[string]string{
"gotype": "error", "gotype": "error",
"gotypex": "error",
"test": "error", "test": "error",
"testify": "error", "testify": "error",
"vet": "error", "vet": "error",

View File

@@ -1,6 +1,7 @@
package main package main
import ( import (
"fmt"
"go/ast" "go/ast"
"go/parser" "go/parser"
"go/token" "go/token"
@@ -14,6 +15,7 @@ type ignoredRange struct {
col int col int
start, end int start, end int
linters []string linters []string
matched bool
} }
func (i *ignoredRange) matches(issue *Issue) bool { func (i *ignoredRange) matches(issue *Issue) bool {
@@ -35,6 +37,14 @@ func (i *ignoredRange) near(col, start int) bool {
return col == i.col && i.end == start-1 return col == i.col && i.end == start-1
} }
func (i *ignoredRange) String() string {
linters := strings.Join(i.linters, ",")
if len(i.linters) == 0 {
linters = "all"
}
return fmt.Sprintf("%s:%d-%d", linters, i.start, i.end)
}
type ignoredRanges []*ignoredRange type ignoredRanges []*ignoredRange
func (ir ignoredRanges) Len() int { return len(ir) } func (ir ignoredRanges) Len() int { return len(ir) }
@@ -66,12 +76,43 @@ func (d *directiveParser) IsIgnored(issue *Issue) bool {
d.lock.Unlock() d.lock.Unlock()
for _, r := range ranges { for _, r := range ranges {
if r.matches(issue) { if r.matches(issue) {
debug("nolint: matched %s to issue %s", r, issue)
r.matched = true
return true return true
} }
} }
return false return false
} }
// Unmatched returns all the ranges which were never used to ignore an issue
func (d *directiveParser) Unmatched() map[string]ignoredRanges {
unmatched := map[string]ignoredRanges{}
for path, ranges := range d.files {
for _, ignore := range ranges {
if !ignore.matched {
unmatched[path] = append(unmatched[path], ignore)
}
}
}
return unmatched
}
// LoadFiles from a list of directories
func (d *directiveParser) LoadFiles(paths []string) error {
d.lock.Lock()
defer d.lock.Unlock()
filenames, err := pathsToFileGlobs(paths)
if err != nil {
return err
}
for _, filename := range filenames {
ranges := d.parseFile(filename)
sort.Sort(ranges)
d.files[filename] = ranges
}
return nil
}
// Takes a set of ignoredRanges, determines if they immediately precede a statement // Takes a set of ignoredRanges, determines if they immediately precede a statement
// construct, and expands the range to include that construct. Why? So you can // construct, and expands the range to include that construct. Why? So you can
// precede a function or struct with //nolint // precede a function or struct with //nolint
@@ -150,7 +191,28 @@ func filterIssuesViaDirectives(directives *directiveParser, issues chan *Issue)
out <- issue out <- issue
} }
} }
if config.WarnUnmatchedDirective {
for _, issue := range warnOnUnusedDirective(directives) {
out <- issue
}
}
close(out) close(out)
}() }()
return out return out
} }
func warnOnUnusedDirective(directives *directiveParser) []*Issue {
out := []*Issue{}
for path, ranges := range directives.Unmatched() {
for _, ignore := range ranges {
issue, _ := NewIssue("nolint", config.formatTemplate)
issue.Path = path
issue.Line = ignore.start
issue.Col = ignore.col
issue.Message = "nolint directive did not match any issue"
out = append(out, issue)
}
}
return out
}

View File

@@ -8,14 +8,13 @@ import (
"path/filepath" "path/filepath"
"reflect" "reflect"
"regexp" "regexp"
"sort"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
"time" "time"
"github.com/google/shlex" "github.com/google/shlex"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
type Vars map[string]string type Vars map[string]string
@@ -41,34 +40,8 @@ func (v Vars) Replace(s string) string {
return s return s
} }
// Severity of linter message.
type Severity string
// Linter message severity levels.
const ( // nolint: deadcode
Error Severity = "error"
Warning Severity = "warning"
)
type Issue struct {
Linter string `json:"linter"`
Severity Severity `json:"severity"`
Path string `json:"path"`
Line int `json:"line"`
Col int `json:"col"`
Message string `json:"message"`
}
func (i *Issue) String() string {
buf := new(bytes.Buffer)
err := formatTemplate.Execute(buf, i)
kingpin.FatalIfError(err, "Invalid output format")
return buf.String()
}
type linterState struct { type linterState struct {
*Linter *Linter
paths []string
issues chan *Issue issues chan *Issue
vars Vars vars Vars
exclude *regexp.Regexp exclude *regexp.Regexp
@@ -76,26 +49,34 @@ type linterState struct {
deadline <-chan time.Time deadline <-chan time.Time
} }
func (l *linterState) Partitions() ([][]string, error) { func (l *linterState) Partitions(paths []string) ([][]string, error) {
command := l.vars.Replace(l.Command) cmdArgs, err := parseCommand(l.command())
cmdArgs, err := parseCommand(command)
if err != nil { if err != nil {
return nil, err return nil, err
} }
parts, err := l.Linter.PartitionStrategy(cmdArgs, l.paths) parts, err := l.Linter.PartitionStrategy(cmdArgs, paths)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return parts, nil return parts, nil
} }
func (l *linterState) command() string {
return l.vars.Replace(l.Command)
}
func runLinters(linters map[string]*Linter, paths []string, concurrency int, exclude, include *regexp.Regexp) (chan *Issue, chan error) { func runLinters(linters map[string]*Linter, paths []string, concurrency int, exclude, include *regexp.Regexp) (chan *Issue, chan error) {
errch := make(chan error, len(linters)) errch := make(chan error, len(linters))
concurrencych := make(chan bool, concurrency) concurrencych := make(chan bool, concurrency)
incomingIssues := make(chan *Issue, 1000000) incomingIssues := make(chan *Issue, 1000000)
processedIssues := filterIssuesViaDirectives(
newDirectiveParser(), directiveParser := newDirectiveParser()
maybeSortIssues(maybeAggregateIssues(incomingIssues))) if config.WarnUnmatchedDirective {
directiveParser.LoadFiles(paths)
}
processedIssues := maybeSortIssues(filterIssuesViaDirectives(
directiveParser, maybeAggregateIssues(incomingIssues)))
vars := Vars{ vars := Vars{
"duplthreshold": fmt.Sprintf("%d", config.DuplThreshold), "duplthreshold": fmt.Sprintf("%d", config.DuplThreshold),
@@ -105,43 +86,46 @@ func runLinters(linters map[string]*Linter, paths []string, concurrency int, exc
"min_occurrences": fmt.Sprintf("%d", config.MinOccurrences), "min_occurrences": fmt.Sprintf("%d", config.MinOccurrences),
"min_const_length": fmt.Sprintf("%d", config.MinConstLength), "min_const_length": fmt.Sprintf("%d", config.MinConstLength),
"tests": "", "tests": "",
"not_tests": "true",
} }
if config.Test { if config.Test {
vars["tests"] = "-t" vars["tests"] = "true"
vars["not_tests"] = ""
} }
wg := &sync.WaitGroup{} wg := &sync.WaitGroup{}
id := 1
for _, linter := range linters { for _, linter := range linters {
deadline := time.After(config.Deadline.Duration()) deadline := time.After(config.Deadline.Duration())
state := &linterState{ state := &linterState{
Linter: linter, Linter: linter,
issues: incomingIssues, issues: incomingIssues,
paths: paths,
vars: vars, vars: vars,
exclude: exclude, exclude: exclude,
include: include, include: include,
deadline: deadline, deadline: deadline,
} }
partitions, err := state.Partitions() partitions, err := state.Partitions(paths)
if err != nil { if err != nil {
errch <- err errch <- err
continue continue
} }
for _, args := range partitions { for _, args := range partitions {
wg.Add(1) wg.Add(1)
concurrencych <- true
// Call the goroutine with a copy of the args array so that the // Call the goroutine with a copy of the args array so that the
// contents of the array are not modified by the next iteration of // contents of the array are not modified by the next iteration of
// the above for loop // the above for loop
go func(args []string) { go func(id int, args []string) {
concurrencych <- true err := executeLinter(id, state, args)
err := executeLinter(state, args)
if err != nil { if err != nil {
errch <- err errch <- err
} }
<-concurrencych <-concurrencych
wg.Done() wg.Done()
}(append(args)) }(id, args)
id++
} }
} }
@@ -153,13 +137,14 @@ func runLinters(linters map[string]*Linter, paths []string, concurrency int, exc
return processedIssues, errch return processedIssues, errch
} }
func executeLinter(state *linterState, args []string) error { func executeLinter(id int, state *linterState, args []string) error {
if len(args) == 0 { if len(args) == 0 {
return fmt.Errorf("missing linter command") return fmt.Errorf("missing linter command")
} }
start := time.Now() start := time.Now()
debug("executing %s", strings.Join(args, " ")) dbg := namespacedDebug(fmt.Sprintf("[%s.%d]: ", state.Name, id))
dbg("executing %s", strings.Join(args, " "))
buf := bytes.NewBuffer(nil) buf := bytes.NewBuffer(nil)
command := args[0] command := args[0]
cmd := exec.Command(command, args[1:]...) // nolint: gas cmd := exec.Command(command, args[1:]...) // nolint: gas
@@ -191,12 +176,12 @@ func executeLinter(state *linterState, args []string) error {
} }
if err != nil { if err != nil {
debug("warning: %s returned %s: %s", command, err, buf.String()) dbg("warning: %s returned %s: %s", command, err, buf.String())
} }
processOutput(state, buf.Bytes()) processOutput(dbg, state, buf.Bytes())
elapsed := time.Since(start) elapsed := time.Since(start)
debug("%s linter took %s", state.Name, elapsed) dbg("%s linter took %s", state.Name, elapsed)
return nil return nil
} }
@@ -216,10 +201,10 @@ func parseCommand(command string) ([]string, error) {
} }
// nolint: gocyclo // nolint: gocyclo
func processOutput(state *linterState, out []byte) { func processOutput(dbg debugFunction, state *linterState, out []byte) {
re := state.regex re := state.regex
all := re.FindAllSubmatchIndex(out, -1) all := re.FindAllSubmatchIndex(out, -1)
debug("%s hits %d: %s", state.Name, len(all), state.Pattern) dbg("%s hits %d: %s", state.Name, len(all), state.Pattern)
cwd, err := os.Getwd() cwd, err := os.Getwd()
if err != nil { if err != nil {
@@ -239,7 +224,9 @@ func processOutput(state *linterState, out []byte) {
group = append(group, fragment) group = append(group, fragment)
} }
issue := &Issue{Line: 1, Linter: state.Linter.Name} issue, err := NewIssue(state.Linter.Name, config.formatTemplate)
kingpin.FatalIfError(err, "Invalid output format")
for i, name := range re.SubexpNames() { for i, name := range re.SubexpNames() {
if group[i] == nil { if group[i] == nil {
continue continue
@@ -275,8 +262,6 @@ func processOutput(state *linterState, out []byte) {
} }
if sev, ok := config.Severity[state.Name]; ok { if sev, ok := config.Severity[state.Name]; ok {
issue.Severity = Severity(sev) issue.Severity = Severity(sev)
} else {
issue.Severity = Warning
} }
if state.exclude != nil && state.exclude.MatchString(issue.String()) { if state.exclude != nil && state.exclude.MatchString(issue.String()) {
continue continue
@@ -319,66 +304,16 @@ func resolvePath(path string) string {
return path return path
} }
type sortedIssues struct {
issues []*Issue
order []string
}
func (s *sortedIssues) Len() int { return len(s.issues) }
func (s *sortedIssues) Swap(i, j int) { s.issues[i], s.issues[j] = s.issues[j], s.issues[i] }
// nolint: gocyclo
func (s *sortedIssues) Less(i, j int) bool {
l, r := s.issues[i], s.issues[j]
for _, key := range s.order {
switch key {
case "path":
if l.Path > r.Path {
return false
}
case "line":
if l.Line > r.Line {
return false
}
case "column":
if l.Col > r.Col {
return false
}
case "severity":
if l.Severity > r.Severity {
return false
}
case "message":
if l.Message > r.Message {
return false
}
case "linter":
if l.Linter > r.Linter {
return false
}
}
}
return true
}
func maybeSortIssues(issues chan *Issue) chan *Issue { func maybeSortIssues(issues chan *Issue) chan *Issue {
if reflect.DeepEqual([]string{"none"}, config.Sort) { if reflect.DeepEqual([]string{"none"}, config.Sort) {
return issues return issues
} }
out := make(chan *Issue, 1000000) return SortIssueChan(issues, config.Sort)
sorted := &sortedIssues{
issues: []*Issue{},
order: config.Sort,
} }
go func() {
for issue := range issues { func maybeAggregateIssues(issues chan *Issue) chan *Issue {
sorted.issues = append(sorted.issues, issue) if !config.Aggregate {
return issues
} }
sort.Sort(sorted) return AggregateIssueChan(issues)
for _, issue := range sorted.issues {
out <- issue
}
close(out)
}()
return out
} }

View File

@@ -0,0 +1,114 @@
package main
import (
"bytes"
"fmt"
"io/ioutil"
"sort"
"strings"
"text/template"
)
// DefaultIssueFormat used to print an issue
const DefaultIssueFormat = "{{.Path}}:{{.Line}}:{{if .Col}}{{.Col}}{{end}}:{{.Severity}}: {{.Message}} ({{.Linter}})"
// Severity of linter message
type Severity string
// Linter message severity levels.
const (
Error Severity = "error"
Warning Severity = "warning"
)
type Issue struct {
Linter string `json:"linter"`
Severity Severity `json:"severity"`
Path string `json:"path"`
Line int `json:"line"`
Col int `json:"col"`
Message string `json:"message"`
formatTmpl *template.Template
}
// NewIssue returns a new issue. Returns an error if formatTmpl is not a valid
// template for an Issue.
func NewIssue(linter string, formatTmpl *template.Template) (*Issue, error) {
issue := &Issue{
Line: 1,
Severity: Warning,
Linter: linter,
formatTmpl: formatTmpl,
}
err := formatTmpl.Execute(ioutil.Discard, issue)
return issue, err
}
func (i *Issue) String() string {
if i.formatTmpl == nil {
col := ""
if i.Col != 0 {
col = fmt.Sprintf("%d", i.Col)
}
return fmt.Sprintf("%s:%d:%s:%s: %s (%s)", strings.TrimSpace(i.Path), i.Line, col, i.Severity, strings.TrimSpace(i.Message), i.Linter)
}
buf := new(bytes.Buffer)
_ = i.formatTmpl.Execute(buf, i)
return buf.String()
}
type sortedIssues struct {
issues []*Issue
order []string
}
func (s *sortedIssues) Len() int { return len(s.issues) }
func (s *sortedIssues) Swap(i, j int) { s.issues[i], s.issues[j] = s.issues[j], s.issues[i] }
func (s *sortedIssues) Less(i, j int) bool {
l, r := s.issues[i], s.issues[j]
return CompareIssue(*l, *r, s.order)
}
// CompareIssue two Issues and return true if left should sort before right
// nolint: gocyclo
func CompareIssue(l, r Issue, order []string) bool {
for _, key := range order {
switch {
case key == "path" && l.Path != r.Path:
return l.Path < r.Path
case key == "line" && l.Line != r.Line:
return l.Line < r.Line
case key == "column" && l.Col != r.Col:
return l.Col < r.Col
case key == "severity" && l.Severity != r.Severity:
return l.Severity < r.Severity
case key == "message" && l.Message != r.Message:
return l.Message < r.Message
case key == "linter" && l.Linter != r.Linter:
return l.Linter < r.Linter
}
}
return true
}
// SortIssueChan reads issues from one channel, sorts them, and returns them to another
// channel
func SortIssueChan(issues chan *Issue, order []string) chan *Issue {
out := make(chan *Issue, 1000000)
sorted := &sortedIssues{
issues: []*Issue{},
order: order,
}
go func() {
for issue := range issues {
sorted.issues = append(sorted.issues, issue)
}
sort.Sort(sorted)
for _, issue := range sorted.issues {
out <- issue
}
close(out)
}()
return out
}

View File

@@ -8,11 +8,10 @@ import (
"sort" "sort"
"strings" "strings"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
type LinterConfig struct { type LinterConfig struct {
Name string
Command string Command string
Pattern string Pattern string
InstallFrom string InstallFrom string
@@ -23,11 +22,12 @@ type LinterConfig struct {
type Linter struct { type Linter struct {
LinterConfig LinterConfig
Name string
regex *regexp.Regexp regex *regexp.Regexp
} }
// NewLinter returns a new linter from a config // NewLinter returns a new linter from a config
func NewLinter(config LinterConfig) (*Linter, error) { func NewLinter(name string, config LinterConfig) (*Linter, error) {
if p, ok := predefinedPatterns[config.Pattern]; ok { if p, ok := predefinedPatterns[config.Pattern]; ok {
config.Pattern = p config.Pattern = p
} }
@@ -35,8 +35,12 @@ func NewLinter(config LinterConfig) (*Linter, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
if config.PartitionStrategy == nil {
config.PartitionStrategy = partitionPathsAsDirectories
}
return &Linter{ return &Linter{
LinterConfig: config, LinterConfig: config,
Name: name,
regex: regex, regex: regex,
}, nil }, nil
} }
@@ -50,26 +54,41 @@ var predefinedPatterns = map[string]string{
"PATH:LINE:MESSAGE": `^(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*)$`, "PATH:LINE:MESSAGE": `^(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*)$`,
} }
func getLinterByName(name string, customSpec string) *Linter { func getLinterByName(name string, overrideConf LinterConfig) *Linter {
if customSpec != "" { conf := defaultLinters[name]
return parseLinterSpec(name, customSpec) if val := overrideConf.Command; val != "" {
conf.Command = val
} }
linter, _ := NewLinter(defaultLinters[name]) if val := overrideConf.Pattern; val != "" {
conf.Pattern = val
}
if val := overrideConf.InstallFrom; val != "" {
conf.InstallFrom = val
}
if overrideConf.IsFast {
conf.IsFast = true
}
if val := overrideConf.PartitionStrategy; val != nil {
conf.PartitionStrategy = val
}
linter, _ := NewLinter(name, conf)
return linter return linter
} }
func parseLinterSpec(name string, spec string) *Linter { func parseLinterConfigSpec(name string, spec string) (LinterConfig, error) {
parts := strings.SplitN(spec, ":", 2) parts := strings.SplitN(spec, ":", 2)
if len(parts) < 2 { if len(parts) < 2 {
kingpin.Fatalf("invalid linter: %q", spec) return LinterConfig{}, fmt.Errorf("linter spec needs at least two components")
} }
config := defaultLinters[name] config := defaultLinters[name]
config.Command, config.Pattern = parts[0], parts[1] config.Command, config.Pattern = parts[0], parts[1]
if predefined, ok := predefinedPatterns[config.Pattern]; ok {
config.Pattern = predefined
}
linter, err := NewLinter(config) return config, nil
kingpin.FatalIfError(err, "invalid linter %q", name)
return linter
} }
func makeInstallCommand(linters ...string) []string { func makeInstallCommand(linters ...string) []string {
@@ -148,9 +167,9 @@ func installLinters() {
func getDefaultLinters() []*Linter { func getDefaultLinters() []*Linter {
out := []*Linter{} out := []*Linter{}
for _, config := range defaultLinters { for name, config := range defaultLinters {
linter, err := NewLinter(config) linter, err := NewLinter(name, config)
kingpin.FatalIfError(err, "invalid linter %q", config.Name) kingpin.FatalIfError(err, "invalid linter %q", name)
out = append(out, linter) out = append(out, linter)
} }
return out return out
@@ -166,226 +185,228 @@ func defaultEnabled() []string {
return enabled return enabled
} }
func validateLinters(linters map[string]*Linter, config *Config) error {
var unknownLinters []string
for name := range linters {
if _, isDefault := defaultLinters[name]; !isDefault {
if _, isCustom := config.Linters[name]; !isCustom {
unknownLinters = append(unknownLinters, name)
}
}
}
if len(unknownLinters) > 0 {
return fmt.Errorf("unknown linters: %s", strings.Join(unknownLinters, ", "))
}
return nil
}
const vetPattern = `^(?:vet:.*?\.go:\s+(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*))|(?:(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*))$` const vetPattern = `^(?:vet:.*?\.go:\s+(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*))|(?:(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*))$`
var defaultLinters = map[string]LinterConfig{ var defaultLinters = map[string]LinterConfig{
"aligncheck": { "maligned": {
Name: "aligncheck", Command: "maligned",
Command: "aligncheck",
Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`, Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`,
InstallFrom: "github.com/opennota/check/cmd/aligncheck", InstallFrom: "github.com/mdempsky/maligned",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"deadcode": { "deadcode": {
Name: "deadcode",
Command: "deadcode", Command: "deadcode",
Pattern: `^deadcode: (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`, Pattern: `^deadcode: (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
InstallFrom: "github.com/tsenart/deadcode", InstallFrom: "github.com/tsenart/deadcode",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
}, },
"dupl": { "dupl": {
Name: "dupl",
Command: `dupl -plumbing -threshold {duplthreshold}`, Command: `dupl -plumbing -threshold {duplthreshold}`,
Pattern: `^(?P<path>.*?\.go):(?P<line>\d+)-\d+:\s*(?P<message>.*)$`, Pattern: `^(?P<path>.*?\.go):(?P<line>\d+)-\d+:\s*(?P<message>.*)$`,
InstallFrom: "github.com/mibk/dupl", InstallFrom: "github.com/mibk/dupl",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"errcheck": { "errcheck": {
Name: "errcheck", Command: `errcheck -abspath {not_tests=-ignoretests}`,
Command: `errcheck -abspath`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/kisielk/errcheck", InstallFrom: "github.com/kisielk/errcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"gas": { "gas": {
Name: "gas",
Command: `gas -fmt=csv`, Command: `gas -fmt=csv`,
Pattern: `^(?P<path>.*?\.go),(?P<line>\d+),(?P<message>[^,]+,[^,]+,[^,]+)`, Pattern: `^(?P<path>.*?\.go),(?P<line>\d+),(?P<message>[^,]+,[^,]+,[^,]+)`,
InstallFrom: "github.com/GoASTScanner/gas", InstallFrom: "github.com/GoASTScanner/gas",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsFiles,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"goconst": { "goconst": {
Name: "goconst",
Command: `goconst -min-occurrences {min_occurrences} -min-length {min_const_length}`, Command: `goconst -min-occurrences {min_occurrences} -min-length {min_const_length}`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/jgautheron/goconst/cmd/goconst", InstallFrom: "github.com/jgautheron/goconst/cmd/goconst",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"gocyclo": { "gocyclo": {
Name: "gocyclo",
Command: `gocyclo -over {mincyclo}`, Command: `gocyclo -over {mincyclo}`,
Pattern: `^(?P<cyclo>\d+)\s+\S+\s(?P<function>\S+)\s+(?P<path>.*?\.go):(?P<line>\d+):(\d+)$`, Pattern: `^(?P<cyclo>\d+)\s+\S+\s(?P<function>\S+)\s+(?P<path>.*?\.go):(?P<line>\d+):(\d+)$`,
InstallFrom: "github.com/alecthomas/gocyclo", InstallFrom: "github.com/alecthomas/gocyclo",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"gofmt": { "gofmt": {
Name: "gofmt",
Command: `gofmt -l -s`, Command: `gofmt -l -s`,
Pattern: `^(?P<path>.*?\.go)$`, Pattern: `^(?P<path>.*?\.go)$`,
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"goimports": { "goimports": {
Name: "goimports",
Command: `goimports -l`, Command: `goimports -l`,
Pattern: `^(?P<path>.*?\.go)$`, Pattern: `^(?P<path>.*?\.go)$`,
InstallFrom: "golang.org/x/tools/cmd/goimports", InstallFrom: "golang.org/x/tools/cmd/goimports",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"golint": { "golint": {
Name: "golint",
Command: `golint -min_confidence {min_confidence}`, Command: `golint -min_confidence {min_confidence}`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/golang/lint/golint", InstallFrom: "github.com/golang/lint/golint",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"gosimple": { "gosimple": {
Name: "gosimple",
Command: `gosimple`, Command: `gosimple`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/gosimple", InstallFrom: "honnef.co/go/tools/cmd/gosimple",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"gotype": { "gotype": {
Name: "gotype",
Command: `gotype -e {tests=-t}`, Command: `gotype -e {tests=-t}`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "golang.org/x/tools/cmd/gotype", InstallFrom: "golang.org/x/tools/cmd/gotype",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsByDirectory,
defaultEnabled: true,
IsFast: true,
},
"gotypex": {
Command: `gotype -e -x`,
Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "golang.org/x/tools/cmd/gotype",
PartitionStrategy: partitionPathsByDirectory,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"ineffassign": { "ineffassign": {
Name: "ineffassign",
Command: `ineffassign -n`, Command: `ineffassign -n`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/gordonklaus/ineffassign", InstallFrom: "github.com/gordonklaus/ineffassign",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"interfacer": { "interfacer": {
Name: "interfacer",
Command: `interfacer`, Command: `interfacer`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/mvdan/interfacer/cmd/interfacer", InstallFrom: "mvdan.cc/interfacer",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"lll": { "lll": {
Name: "lll",
Command: `lll -g -l {maxlinelength}`, Command: `lll -g -l {maxlinelength}`,
Pattern: `PATH:LINE:MESSAGE`, Pattern: `PATH:LINE:MESSAGE`,
InstallFrom: "github.com/walle/lll/cmd/lll", InstallFrom: "github.com/walle/lll/cmd/lll",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"megacheck": { "megacheck": {
Name: "megacheck",
Command: `megacheck`, Command: `megacheck`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/megacheck", InstallFrom: "honnef.co/go/tools/cmd/megacheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"misspell": { "misspell": {
Name: "misspell",
Command: `misspell -j 1`, Command: `misspell -j 1`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/client9/misspell/cmd/misspell", InstallFrom: "github.com/client9/misspell/cmd/misspell",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"nakedret": {
Command: `nakedret`,
Pattern: `^(?P<path>.*?\.go):(?P<line>\d+)\s*(?P<message>.*)$`,
InstallFrom: "github.com/alexkohler/nakedret",
PartitionStrategy: partitionPathsAsDirectories,
},
"safesql": { "safesql": {
Name: "safesql",
Command: `safesql`, Command: `safesql`,
Pattern: `^- (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+)$`, Pattern: `^- (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+)$`,
InstallFrom: "github.com/stripe/safesql", InstallFrom: "github.com/stripe/safesql",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"staticcheck": { "staticcheck": {
Name: "staticcheck",
Command: `staticcheck`, Command: `staticcheck`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/staticcheck", InstallFrom: "honnef.co/go/tools/cmd/staticcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"structcheck": { "structcheck": {
Name: "structcheck",
Command: `structcheck {tests=-t}`, Command: `structcheck {tests=-t}`,
Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`, Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`,
InstallFrom: "github.com/opennota/check/cmd/structcheck", InstallFrom: "github.com/opennota/check/cmd/structcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"test": { "test": {
Name: "test",
Command: `go test`, Command: `go test`,
Pattern: `^--- FAIL: .*$\s+(?P<path>.*?\.go):(?P<line>\d+): (?P<message>.*)$`, Pattern: `^--- FAIL: .*$\s+(?P<path>.*?\.go):(?P<line>\d+): (?P<message>.*)$`,
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"testify": { "testify": {
Name: "testify",
Command: `go test`, Command: `go test`,
Pattern: `Location:\s+(?P<path>.*?\.go):(?P<line>\d+)$\s+Error:\s+(?P<message>[^\n]+)`, Pattern: `Location:\s+(?P<path>.*?\.go):(?P<line>\d+)$\s+Error:\s+(?P<message>[^\n]+)`,
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"unconvert": { "unconvert": {
Name: "unconvert",
Command: `unconvert`, Command: `unconvert`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/mdempsky/unconvert", InstallFrom: "github.com/mdempsky/unconvert",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"unparam": { "unparam": {
Name: "unparam", Command: `unparam {not_tests=-tests=false}`,
Command: `unparam`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/mvdan/unparam", InstallFrom: "mvdan.cc/unparam",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"unused": { "unused": {
Name: "unused",
Command: `unused`, Command: `unused`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/unused", InstallFrom: "honnef.co/go/tools/cmd/unused",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"varcheck": { "varcheck": {
Name: "varcheck",
Command: `varcheck`, Command: `varcheck`,
Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`, Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
InstallFrom: "github.com/opennota/check/cmd/varcheck", InstallFrom: "github.com/opennota/check/cmd/varcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"vet": { "vet": {
Name: "vet", Command: `govet --no-recurse`,
Command: `go tool vet`,
Pattern: vetPattern, Pattern: vetPattern,
PartitionStrategy: partitionToPackageFileGlobs, InstallFrom: "github.com/dnephin/govet",
PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"vetshadow": { "vetshadow": {
Name: "vetshadow", Command: `govet --no-recurse --shadow`,
Command: `go tool vet --shadow`,
Pattern: vetPattern, Pattern: vetPattern,
PartitionStrategy: partitionToPackageFileGlobs, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },

View File

@@ -14,7 +14,7 @@ import (
"text/template" "text/template"
"time" "time"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
var ( var (
@@ -26,10 +26,10 @@ var (
) )
func setupFlags(app *kingpin.Application) { func setupFlags(app *kingpin.Application) {
app.Flag("config", "Load JSON configuration from file.").Action(loadConfig).String() app.Flag("config", "Load JSON configuration from file.").Envar("GOMETALINTER_CONFIG").Action(loadConfig).String()
app.Flag("disable", "Disable previously enabled linters.").PlaceHolder("LINTER").Short('D').Action(disableAction).Strings() app.Flag("disable", "Disable previously enabled linters.").PlaceHolder("LINTER").Short('D').Action(disableAction).Strings()
app.Flag("enable", "Enable previously disabled linters.").PlaceHolder("LINTER").Short('E').Action(enableAction).Strings() app.Flag("enable", "Enable previously disabled linters.").PlaceHolder("LINTER").Short('E').Action(enableAction).Strings()
app.Flag("linter", "Define a linter.").PlaceHolder("NAME:COMMAND:PATTERN").StringMapVar(&config.Linters) app.Flag("linter", "Define a linter.").PlaceHolder("NAME:COMMAND:PATTERN").Action(cliLinterOverrides).StringMap()
app.Flag("message-overrides", "Override message from linter. {message} will be expanded to the original message.").PlaceHolder("LINTER:MESSAGE").StringMapVar(&config.MessageOverride) app.Flag("message-overrides", "Override message from linter. {message} will be expanded to the original message.").PlaceHolder("LINTER:MESSAGE").StringMapVar(&config.MessageOverride)
app.Flag("severity", "Map of linter severities.").PlaceHolder("LINTER:SEVERITY").StringMapVar(&config.Severity) app.Flag("severity", "Map of linter severities.").PlaceHolder("LINTER:SEVERITY").StringMapVar(&config.Severity)
app.Flag("disable-all", "Disable all linters.").Action(disableAllAction).Bool() app.Flag("disable-all", "Disable all linters.").Action(disableAllAction).Bool()
@@ -51,19 +51,36 @@ func setupFlags(app *kingpin.Application) {
app.Flag("line-length", "Report lines longer than N (using lll).").PlaceHolder("80").IntVar(&config.LineLength) app.Flag("line-length", "Report lines longer than N (using lll).").PlaceHolder("80").IntVar(&config.LineLength)
app.Flag("min-confidence", "Minimum confidence interval to pass to golint.").PlaceHolder(".80").FloatVar(&config.MinConfidence) app.Flag("min-confidence", "Minimum confidence interval to pass to golint.").PlaceHolder(".80").FloatVar(&config.MinConfidence)
app.Flag("min-occurrences", "Minimum occurrences to pass to goconst.").PlaceHolder("3").IntVar(&config.MinOccurrences) app.Flag("min-occurrences", "Minimum occurrences to pass to goconst.").PlaceHolder("3").IntVar(&config.MinOccurrences)
app.Flag("min-const-length", "Minimumum constant length.").PlaceHolder("3").IntVar(&config.MinConstLength) app.Flag("min-const-length", "Minimum constant length.").PlaceHolder("3").IntVar(&config.MinConstLength)
app.Flag("dupl-threshold", "Minimum token sequence as a clone for dupl.").PlaceHolder("50").IntVar(&config.DuplThreshold) app.Flag("dupl-threshold", "Minimum token sequence as a clone for dupl.").PlaceHolder("50").IntVar(&config.DuplThreshold)
app.Flag("sort", fmt.Sprintf("Sort output by any of %s.", strings.Join(sortKeys, ", "))).PlaceHolder("none").EnumsVar(&config.Sort, sortKeys...) app.Flag("sort", fmt.Sprintf("Sort output by any of %s.", strings.Join(sortKeys, ", "))).PlaceHolder("none").EnumsVar(&config.Sort, sortKeys...)
app.Flag("tests", "Include test files for linters that support this option").Short('t').BoolVar(&config.Test) app.Flag("tests", "Include test files for linters that support this option.").Short('t').BoolVar(&config.Test)
app.Flag("deadline", "Cancel linters if they have not completed within this duration.").PlaceHolder("30s").DurationVar((*time.Duration)(&config.Deadline)) app.Flag("deadline", "Cancel linters if they have not completed within this duration.").PlaceHolder("30s").DurationVar((*time.Duration)(&config.Deadline))
app.Flag("errors", "Only show errors.").BoolVar(&config.Errors) app.Flag("errors", "Only show errors.").BoolVar(&config.Errors)
app.Flag("json", "Generate structured JSON rather than standard line-based output.").BoolVar(&config.JSON) app.Flag("json", "Generate structured JSON rather than standard line-based output.").BoolVar(&config.JSON)
app.Flag("checkstyle", "Generate checkstyle XML rather than standard line-based output.").BoolVar(&config.Checkstyle) app.Flag("checkstyle", "Generate checkstyle XML rather than standard line-based output.").BoolVar(&config.Checkstyle)
app.Flag("enable-gc", "Enable GC for linters (useful on large repositories).").BoolVar(&config.EnableGC) app.Flag("enable-gc", "Enable GC for linters (useful on large repositories).").BoolVar(&config.EnableGC)
app.Flag("aggregate", "Aggregate issues reported by several linters.").BoolVar(&config.Aggregate) app.Flag("aggregate", "Aggregate issues reported by several linters.").BoolVar(&config.Aggregate)
app.Flag("warn-unmatched-nolint", "Warn if a nolint directive is not matched with an issue.").BoolVar(&config.WarnUnmatchedDirective)
app.GetFlag("help").Short('h') app.GetFlag("help").Short('h')
} }
func cliLinterOverrides(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
// expected input structure - <name>:<command-spec>
parts := strings.SplitN(*element.Value, ":", 2)
if len(parts) < 2 {
return fmt.Errorf("incorrectly formatted input: %s", *element.Value)
}
name := parts[0]
spec := parts[1]
conf, err := parseLinterConfigSpec(name, spec)
if err != nil {
return fmt.Errorf("incorrectly formatted input: %s", *element.Value)
}
config.Linters[name] = StringOrLinterConfig(conf)
return nil
}
func loadConfig(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error { func loadConfig(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
r, err := os.Open(*element.Value) r, err := os.Open(*element.Value)
if err != nil { if err != nil {
@@ -114,12 +131,20 @@ func enableAllAction(app *kingpin.Application, element *kingpin.ParseElement, ct
return nil return nil
} }
type debugFunction func(format string, args ...interface{})
func debug(format string, args ...interface{}) { func debug(format string, args ...interface{}) {
if config.Debug { if config.Debug {
fmt.Fprintf(os.Stderr, "DEBUG: "+format+"\n", args...) fmt.Fprintf(os.Stderr, "DEBUG: "+format+"\n", args...)
} }
} }
func namespacedDebug(prefix string) debugFunction {
return func(format string, args ...interface{}) {
debug(prefix+format, args...)
}
}
func warning(format string, args ...interface{}) { func warning(format string, args ...interface{}) {
fmt.Fprintf(os.Stderr, "WARNING: "+format+"\n", args...) fmt.Fprintf(os.Stderr, "WARNING: "+format+"\n", args...)
} }
@@ -131,8 +156,8 @@ func formatLinters() string {
if install == "()" { if install == "()" {
install = "" install = ""
} }
fmt.Fprintf(w, " %s %s\n %s\n %s\n", fmt.Fprintf(w, " %s: %s\n\tcommand: %s\n\tregex: %s\n\tfast: %t\n\tdefault enabled: %t\n\n",
linter.Name, install, linter.Command, linter.Pattern) linter.Name, install, linter.Command, linter.Pattern, linter.IsFast, linter.defaultEnabled)
} }
return w.String() return w.String()
} }
@@ -176,6 +201,9 @@ Severity override map (default is "warning"):
paths := resolvePaths(*pathsArg, config.Skip) paths := resolvePaths(*pathsArg, config.Skip)
linters := lintersFromConfig(config) linters := lintersFromConfig(config)
err := validateLinters(linters, config)
kingpin.FatalIfError(err, "")
issues, errch := runLinters(linters, paths, config.Concurrency, exclude, include) issues, errch := runLinters(linters, paths, config.Concurrency, exclude, include)
status := 0 status := 0
if config.JSON { if config.JSON {
@@ -198,7 +226,7 @@ Severity override map (default is "warning"):
func processConfig(config *Config) (include *regexp.Regexp, exclude *regexp.Regexp) { func processConfig(config *Config) (include *regexp.Regexp, exclude *regexp.Regexp) {
tmpl, err := template.New("output").Parse(config.Format) tmpl, err := template.New("output").Parse(config.Format)
kingpin.FatalIfError(err, "invalid format %q", config.Format) kingpin.FatalIfError(err, "invalid format %q", config.Format)
formatTemplate = tmpl config.formatTemplate = tmpl
// Linters are by their very nature, short lived, so disable GC. // Linters are by their very nature, short lived, so disable GC.
// Reduced (user) linting time on kingpin from 0.97s to 0.64s. // Reduced (user) linting time on kingpin from 0.97s to 0.64s.
@@ -340,8 +368,7 @@ func lintersFromConfig(config *Config) map[string]*Linter {
out := map[string]*Linter{} out := map[string]*Linter{}
config.Enable = replaceWithMegacheck(config.Enable, config.EnableAll) config.Enable = replaceWithMegacheck(config.Enable, config.EnableAll)
for _, name := range config.Enable { for _, name := range config.Enable {
linter := getLinterByName(name, config.Linters[name]) linter := getLinterByName(name, LinterConfig(config.Linters[name]))
if config.Fast && !linter.IsFast { if config.Fast && !linter.IsFast {
continue continue
} }

View File

@@ -1,6 +1,7 @@
package main package main
import ( import (
"encoding/json"
"fmt" "fmt"
"path/filepath" "path/filepath"
) )
@@ -10,6 +11,29 @@ const MaxCommandBytes = 32000
type partitionStrategy func([]string, []string) ([][]string, error) type partitionStrategy func([]string, []string) ([][]string, error)
func (ps *partitionStrategy) UnmarshalJSON(raw []byte) error {
var strategyName string
if err := json.Unmarshal(raw, &strategyName); err != nil {
return err
}
switch strategyName {
case "directories":
*ps = partitionPathsAsDirectories
case "files":
*ps = partitionPathsAsFiles
case "packages":
*ps = partitionPathsAsPackages
case "files-by-package":
*ps = partitionPathsAsFilesGroupedByPackage
case "single-directory":
*ps = partitionPathsByDirectory
default:
return fmt.Errorf("unknown parition strategy %s", strategyName)
}
return nil
}
func pathsToFileGlobs(paths []string) ([]string, error) { func pathsToFileGlobs(paths []string) ([]string, error) {
filePaths := []string{} filePaths := []string{}
for _, dir := range paths { for _, dir := range paths {
@@ -22,7 +46,7 @@ func pathsToFileGlobs(paths []string) ([]string, error) {
return filePaths, nil return filePaths, nil
} }
func partitionToMaxArgSize(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsDirectories(cmdArgs []string, paths []string) ([][]string, error) {
return partitionToMaxSize(cmdArgs, paths, MaxCommandBytes), nil return partitionToMaxSize(cmdArgs, paths, MaxCommandBytes), nil
} }
@@ -72,15 +96,15 @@ func (p *sizePartitioner) end() [][]string {
return p.parts return p.parts
} }
func partitionToMaxArgSizeWithFileGlobs(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsFiles(cmdArgs []string, paths []string) ([][]string, error) {
filePaths, err := pathsToFileGlobs(paths) filePaths, err := pathsToFileGlobs(paths)
if err != nil || len(filePaths) == 0 { if err != nil || len(filePaths) == 0 {
return nil, err return nil, err
} }
return partitionToMaxArgSize(cmdArgs, filePaths) return partitionPathsAsDirectories(cmdArgs, filePaths)
} }
func partitionToPackageFileGlobs(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsFilesGroupedByPackage(cmdArgs []string, paths []string) ([][]string, error) {
parts := [][]string{} parts := [][]string{}
for _, path := range paths { for _, path := range paths {
filePaths, err := pathsToFileGlobs([]string{path}) filePaths, err := pathsToFileGlobs([]string{path})
@@ -95,12 +119,12 @@ func partitionToPackageFileGlobs(cmdArgs []string, paths []string) ([][]string,
return parts, nil return parts, nil
} }
func partitionToMaxArgSizeWithPackagePaths(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsPackages(cmdArgs []string, paths []string) ([][]string, error) {
packagePaths, err := pathsToPackagePaths(paths) packagePaths, err := pathsToPackagePaths(paths)
if err != nil || len(packagePaths) == 0 { if err != nil || len(packagePaths) == 0 {
return nil, err return nil, err
} }
return partitionToMaxArgSize(cmdArgs, packagePaths) return partitionPathsAsDirectories(cmdArgs, packagePaths)
} }
func pathsToPackagePaths(paths []string) ([]string, error) { func pathsToPackagePaths(paths []string) ([]string, error) {
@@ -129,3 +153,11 @@ func packageNameFromPath(path string) (string, error) {
} }
return "", fmt.Errorf("%s not in GOPATH", path) return "", fmt.Errorf("%s not in GOPATH", path)
} }
func partitionPathsByDirectory(cmdArgs []string, paths []string) ([][]string, error) {
parts := [][]string{}
for _, path := range paths {
parts = append(parts, append(cmdArgs, path))
}
return parts, nil
}

21
tools/vendor/github.com/alexkohler/nakedret/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Alex Kohler
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

103
tools/vendor/github.com/alexkohler/nakedret/README.md generated vendored Normal file
View File

@@ -0,0 +1,103 @@
# nakedret
nakedret is a Go static analysis tool to find naked returns in functions greater than a specified function length.
## Installation
go get -u github.com/alexkohler/nakedret
## Usage
Similar to other Go static anaylsis tools (such as golint, go vet) , nakedret can be invoked with one or more filenames, directories, or packages named by its import path. Nakedret also supports the `...` wildcard.
nakedret [flags] files/directories/packages
Currently, the only flag supported is -l, which is an optional numeric flag to specify the maximum length a function can be (in terms of line length). If not specified, it defaults to 5.
## Purpose
As noted in Go's [Code Review comments](https://github.com/golang/go/wiki/CodeReviewComments#named-result-parameters):
> Naked returns are okay if the function is a handful of lines. Once it's a medium sized function, be explicit with your return
> values. Corollary: it's not worth it to name result parameters just because it enables you to use naked returns. Clarity of docs is always more important than saving a line or two in your function.
This tool aims to catch naked returns on non-trivial functions.
## Example
Let's take the `types` package in the Go source as an example:
```Bash
$ nakedret -l 25 types/
types/check.go:245 checkFiles naked returns on 26 line function
types/typexpr.go:443 collectParams naked returns on 53 line function
types/stmt.go:275 caseTypes naked returns on 27 line function
types/lookup.go:275 MissingMethod naked returns on 39 line function
```
Below is one of the not so intuitive uses of naked returns in `types/lookup.go` found by nakedret (nakedret will return the line number of the last naked return in the function):
```Go
func MissingMethod(V Type, T *Interface, static bool) (method *Func, wrongType bool) {
// fast path for common case
if T.Empty() {
return
}
// TODO(gri) Consider using method sets here. Might be more efficient.
if ityp, _ := V.Underlying().(*Interface); ityp != nil {
// TODO(gri) allMethods is sorted - can do this more efficiently
for _, m := range T.allMethods {
_, obj := lookupMethod(ityp.allMethods, m.pkg, m.name)
switch {
case obj == nil:
if static {
return m, false
}
case !Identical(obj.Type(), m.typ):
return m, true
}
}
return
}
// A concrete type implements T if it implements all methods of T.
for _, m := range T.allMethods {
obj, _, _ := lookupFieldOrMethod(V, false, m.pkg, m.name)
f, _ := obj.(*Func)
if f == nil {
return m, false
}
if !Identical(f.typ, m.typ) {
return m, true
}
}
return
}
```
## TODO
- Unit tests (may require some refactoring to do correctly)
- supporting toggling of `build.Context.UseAllFiles` may be useful for some.
- Configuration on whether or not to run on test files
- Vim quickfix format?
- Globbing support (e.g. nakedret *.go)
## Contributing
Pull requests welcome!
## Other static analysis tools
If you've enjoyed nakedret, take a look at my other static anaylsis tools!
- [unimport](https://github.com/alexkohler/unimport) - Finds unnecessary import aliases
- [prealloc](https://github.com/alexkohler/prealloc) - Finds slice declarations that could potentially be preallocated.

310
tools/vendor/github.com/alexkohler/nakedret/import.go generated vendored Normal file
View File

@@ -0,0 +1,310 @@
package main
/*
This file holds a direct copy of the import path matching code of
https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be
replaced when https://golang.org/issue/8768 is resolved.
It has been updated to follow upstream changes in a few ways.
*/
import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath"
"regexp"
"runtime"
"strings"
)
var buildContext = build.Default
var (
goroot = filepath.Clean(runtime.GOROOT())
gorootSrc = filepath.Join(goroot, "src")
)
// importPathsNoDotExpansion returns the import paths to use for the given
// command line, but it does no ... expansion.
func importPathsNoDotExpansion(args []string) []string {
if len(args) == 0 {
return []string{"."}
}
var out []string
for _, a := range args {
// Arguments are supposed to be import paths, but
// as a courtesy to Windows developers, rewrite \ to /
// in command-line arguments. Handles .\... and so on.
if filepath.Separator == '\\' {
a = strings.Replace(a, `\`, `/`, -1)
}
// Put argument in canonical form, but preserve leading ./.
if strings.HasPrefix(a, "./") {
a = "./" + path.Clean(a)
if a == "./." {
a = "."
}
} else {
a = path.Clean(a)
}
if a == "all" || a == "std" {
out = append(out, allPackages(a)...)
continue
}
out = append(out, a)
}
return out
}
// importPaths returns the import paths to use for the given command line.
func importPaths(args []string) []string {
args = importPathsNoDotExpansion(args)
var out []string
for _, a := range args {
if strings.Contains(a, "...") {
if build.IsLocalImport(a) {
out = append(out, allPackagesInFS(a)...)
} else {
out = append(out, allPackages(a)...)
}
continue
}
out = append(out, a)
}
return out
}
// matchPattern(pattern)(name) reports whether
// name matches pattern. Pattern is a limited glob
// pattern in which '...' means 'any string' and there
// is no other special syntax.
func matchPattern(pattern string) func(name string) bool {
re := regexp.QuoteMeta(pattern)
re = strings.Replace(re, `\.\.\.`, `.*`, -1)
// Special case: foo/... matches foo too.
if strings.HasSuffix(re, `/.*`) {
re = re[:len(re)-len(`/.*`)] + `(/.*)?`
}
reg := regexp.MustCompile(`^` + re + `$`)
return func(name string) bool {
return reg.MatchString(name)
}
}
// hasPathPrefix reports whether the path s begins with the
// elements in prefix.
func hasPathPrefix(s, prefix string) bool {
switch {
default:
return false
case len(s) == len(prefix):
return s == prefix
case len(s) > len(prefix):
if prefix != "" && prefix[len(prefix)-1] == '/' {
return strings.HasPrefix(s, prefix)
}
return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
}
}
// treeCanMatchPattern(pattern)(name) reports whether
// name or children of name can possibly match pattern.
// Pattern is the same limited glob accepted by matchPattern.
func treeCanMatchPattern(pattern string) func(name string) bool {
wildCard := false
if i := strings.Index(pattern, "..."); i >= 0 {
wildCard = true
pattern = pattern[:i]
}
return func(name string) bool {
return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
wildCard && strings.HasPrefix(name, pattern)
}
}
// allPackages returns all the packages that can be found
// under the $GOPATH directories and $GOROOT matching pattern.
// The pattern is either "all" (all packages), "std" (standard packages)
// or a path including "...".
func allPackages(pattern string) []string {
pkgs := matchPackages(pattern)
if len(pkgs) == 0 {
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
}
return pkgs
}
func matchPackages(pattern string) []string {
match := func(string) bool { return true }
treeCanMatch := func(string) bool { return true }
if pattern != "all" && pattern != "std" {
match = matchPattern(pattern)
treeCanMatch = treeCanMatchPattern(pattern)
}
have := map[string]bool{
"builtin": true, // ignore pseudo-package that exists only for documentation
}
if !buildContext.CgoEnabled {
have["runtime/cgo"] = true // ignore during walk
}
var pkgs []string
// Commands
cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator)
filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() || path == cmd {
return nil
}
name := path[len(cmd):]
if !treeCanMatch(name) {
return filepath.SkipDir
}
// Commands are all in cmd/, not in subdirectories.
if strings.Contains(name, string(filepath.Separator)) {
return filepath.SkipDir
}
// We use, e.g., cmd/gofmt as the pseudo import path for gofmt.
name = "cmd/" + name
if have[name] {
return nil
}
have[name] = true
if !match(name) {
return nil
}
_, err = buildContext.ImportDir(path, 0)
if err != nil {
if _, noGo := err.(*build.NoGoError); !noGo {
log.Print(err)
}
return nil
}
pkgs = append(pkgs, name)
return nil
})
for _, src := range buildContext.SrcDirs() {
if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
continue
}
src = filepath.Clean(src) + string(filepath.Separator)
root := src
if pattern == "cmd" {
root += "cmd" + string(filepath.Separator)
}
filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() || path == src {
return nil
}
// Avoid .foo, _foo, testdata and vendor directory trees.
_, elem := filepath.Split(path)
if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" {
return filepath.SkipDir
}
name := filepath.ToSlash(path[len(src):])
if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") {
// The name "std" is only the standard library.
// If the name is cmd, it's the root of the command tree.
return filepath.SkipDir
}
if !treeCanMatch(name) {
return filepath.SkipDir
}
if have[name] {
return nil
}
have[name] = true
if !match(name) {
return nil
}
_, err = buildContext.ImportDir(path, 0)
if err != nil {
if _, noGo := err.(*build.NoGoError); noGo {
return nil
}
}
pkgs = append(pkgs, name)
return nil
})
}
return pkgs
}
// allPackagesInFS is like allPackages but is passed a pattern
// beginning ./ or ../, meaning it should scan the tree rooted
// at the given directory. There are ... in the pattern too.
func allPackagesInFS(pattern string) []string {
pkgs := matchPackagesInFS(pattern)
if len(pkgs) == 0 {
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
}
return pkgs
}
func matchPackagesInFS(pattern string) []string {
// Find directory to begin the scan.
// Could be smarter but this one optimization
// is enough for now, since ... is usually at the
// end of a path.
i := strings.Index(pattern, "...")
dir, _ := path.Split(pattern[:i])
// pattern begins with ./ or ../.
// path.Clean will discard the ./ but not the ../.
// We need to preserve the ./ for pattern matching
// and in the returned import paths.
prefix := ""
if strings.HasPrefix(pattern, "./") {
prefix = "./"
}
match := matchPattern(pattern)
var pkgs []string
filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() {
return nil
}
if path == dir {
// filepath.Walk starts at dir and recurses. For the recursive case,
// the path is the result of filepath.Join, which calls filepath.Clean.
// The initial case is not Cleaned, though, so we do this explicitly.
//
// This converts a path like "./io/" to "io". Without this step, running
// "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io
// package, because prepending the prefix "./" to the unclean path would
// result in "././io", and match("././io") returns false.
path = filepath.Clean(path)
}
// Avoid .foo, _foo, testdata and vendor directory trees, but do not avoid "." or "..".
_, elem := filepath.Split(path)
dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
if dot || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" {
return filepath.SkipDir
}
name := prefix + filepath.ToSlash(path)
if !match(name) {
return nil
}
if _, err = build.ImportDir(path, 0); err != nil {
if _, noGo := err.(*build.NoGoError); !noGo {
log.Print(err)
}
return nil
}
pkgs = append(pkgs, name)
return nil
})
return pkgs
}

213
tools/vendor/github.com/alexkohler/nakedret/nakedret.go generated vendored Normal file
View File

@@ -0,0 +1,213 @@
package main
import (
"errors"
"flag"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"log"
"os"
"path/filepath"
"strings"
)
const (
pwd = "./"
)
func init() {
//TODO allow build tags
build.Default.UseAllFiles = true
}
func usage() {
log.Printf("Usage of %s:\n", os.Args[0])
log.Printf("\nnakedret [flags] # runs on package in current directory\n")
log.Printf("\nnakedret [flags] [packages]\n")
log.Printf("Flags:\n")
flag.PrintDefaults()
}
type returnsVisitor struct {
f *token.FileSet
maxLength uint
}
func main() {
// Remove log timestamp
log.SetFlags(0)
maxLength := flag.Uint("l", 5, "maximum number of lines for a naked return function")
flag.Usage = usage
flag.Parse()
if err := checkNakedReturns(flag.Args(), maxLength); err != nil {
log.Println(err)
}
}
func checkNakedReturns(args []string, maxLength *uint) error {
fset := token.NewFileSet()
files, err := parseInput(args, fset)
if err != nil {
return fmt.Errorf("could not parse input %v", err)
}
if maxLength == nil {
return errors.New("max length nil")
}
retVis := &returnsVisitor{
f: fset,
maxLength: *maxLength,
}
for _, f := range files {
ast.Walk(retVis, f)
}
return nil
}
func parseInput(args []string, fset *token.FileSet) ([]*ast.File, error) {
var directoryList []string
var fileMode bool
files := make([]*ast.File, 0)
if len(args) == 0 {
directoryList = append(directoryList, pwd)
} else {
for _, arg := range args {
if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) {
for _, dirname := range allPackagesInFS(arg) {
directoryList = append(directoryList, dirname)
}
} else if isDir(arg) {
directoryList = append(directoryList, arg)
} else if exists(arg) {
if strings.HasSuffix(arg, ".go") {
fileMode = true
f, err := parser.ParseFile(fset, arg, nil, 0)
if err != nil {
return nil, err
}
files = append(files, f)
} else {
return nil, fmt.Errorf("invalid file %v specified", arg)
}
} else {
//TODO clean this up a bit
imPaths := importPaths([]string{arg})
for _, importPath := range imPaths {
pkg, err := build.Import(importPath, ".", 0)
if err != nil {
return nil, err
}
var stringFiles []string
stringFiles = append(stringFiles, pkg.GoFiles...)
// files = append(files, pkg.CgoFiles...)
stringFiles = append(stringFiles, pkg.TestGoFiles...)
if pkg.Dir != "." {
for i, f := range stringFiles {
stringFiles[i] = filepath.Join(pkg.Dir, f)
}
}
fileMode = true
for _, stringFile := range stringFiles {
f, err := parser.ParseFile(fset, stringFile, nil, 0)
if err != nil {
return nil, err
}
files = append(files, f)
}
}
}
}
}
// if we're not in file mode, then we need to grab each and every package in each directory
// we can to grab all the files
if !fileMode {
for _, fpath := range directoryList {
pkgs, err := parser.ParseDir(fset, fpath, nil, 0)
if err != nil {
return nil, err
}
for _, pkg := range pkgs {
for _, f := range pkg.Files {
files = append(files, f)
}
}
}
}
return files, nil
}
func isDir(filename string) bool {
fi, err := os.Stat(filename)
return err == nil && fi.IsDir()
}
func exists(filename string) bool {
_, err := os.Stat(filename)
return err == nil
}
func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
var namedReturns []*ast.Ident
funcDecl, ok := node.(*ast.FuncDecl)
if !ok {
return v
}
var functionLineLength int
// We've found a function
if funcDecl.Type != nil && funcDecl.Type.Results != nil {
for _, field := range funcDecl.Type.Results.List {
for _, ident := range field.Names {
if ident != nil {
namedReturns = append(namedReturns, ident)
}
}
}
file := v.f.File(funcDecl.Pos())
functionLineLength = file.Position(funcDecl.End()).Line - file.Position(funcDecl.Pos()).Line
}
if len(namedReturns) > 0 && funcDecl.Body != nil {
// Scan the body for usage of the named returns
for _, stmt := range funcDecl.Body.List {
switch s := stmt.(type) {
case *ast.ReturnStmt:
if len(s.Results) == 0 {
file := v.f.File(s.Pos())
if file != nil && uint(functionLineLength) > v.maxLength {
if funcDecl.Name != nil {
log.Printf("%v:%v %v naked returns on %v line function \n", file.Name(), file.Position(s.Pos()).Line, funcDecl.Name.Name, functionLineLength)
}
}
continue
}
default:
}
}
}
return v
}

View File

@@ -1,3 +1,4 @@
// The misspell command corrects commonly misspelled English words in source files.
package main package main
import ( import (

33
tools/vendor/github.com/dnephin/govet/README generated vendored Normal file
View File

@@ -0,0 +1,33 @@
Vet is a tool that checks correctness of Go programs. It runs a suite of tests,
each tailored to check for a particular class of errors. Examples include incorrect
Printf format verbs or malformed build tags.
Over time many checks have been added to vet's suite, but many more have been
rejected as not appropriate for the tool. The criteria applied when selecting which
checks to add are:
Correctness:
Vet's tools are about correctness, not style. A vet check must identify real or
potential bugs that could cause incorrect compilation or execution. A check that
only identifies stylistic points or alternative correct approaches to a situation
is not acceptable.
Frequency:
Vet is run every day by many programmers, often as part of every compilation or
submission. The cost in execution time is considerable, especially in aggregate,
so checks must be likely enough to find real problems that they are worth the
overhead of the added check. A new check that finds only a handful of problems
across all existing programs, even if the problem is significant, is not worth
adding to the suite everyone runs daily.
Precision:
Most of vet's checks are heuristic and can generate both false positives (flagging
correct programs) and false negatives (not flagging incorrect ones). The rate of
both these failures must be very small. A check that is too noisy will be ignored
by the programmer overwhelmed by the output; a check that misses too many of the
cases it's looking for will give a false sense of security. Neither is acceptable.
A vet check must be accurate enough that everything it reports is worth examining,
and complete enough to encourage real confidence.

682
tools/vendor/github.com/dnephin/govet/asmdecl.go generated vendored Normal file
View File

@@ -0,0 +1,682 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Identify mismatches between assembly files and Go func declarations.
package main
import (
"bytes"
"fmt"
"go/ast"
"go/token"
"regexp"
"strconv"
"strings"
)
// 'kind' is a kind of assembly variable.
// The kinds 1, 2, 4, 8 stand for values of that size.
type asmKind int
// These special kinds are not valid sizes.
const (
asmString asmKind = 100 + iota
asmSlice
asmInterface
asmEmptyInterface
)
// An asmArch describes assembly parameters for an architecture
type asmArch struct {
name string
ptrSize int
intSize int
maxAlign int
bigEndian bool
stack string
lr bool
}
// An asmFunc describes the expected variables for a function on a given architecture.
type asmFunc struct {
arch *asmArch
size int // size of all arguments
vars map[string]*asmVar
varByOffset map[int]*asmVar
}
// An asmVar describes a single assembly variable.
type asmVar struct {
name string
kind asmKind
typ string
off int
size int
inner []*asmVar
}
var (
asmArch386 = asmArch{"386", 4, 4, 4, false, "SP", false}
asmArchArm = asmArch{"arm", 4, 4, 4, false, "R13", true}
asmArchArm64 = asmArch{"arm64", 8, 8, 8, false, "RSP", true}
asmArchAmd64 = asmArch{"amd64", 8, 8, 8, false, "SP", false}
asmArchAmd64p32 = asmArch{"amd64p32", 4, 4, 8, false, "SP", false}
asmArchMips64 = asmArch{"mips64", 8, 8, 8, true, "R29", true}
asmArchMips64LE = asmArch{"mips64", 8, 8, 8, false, "R29", true}
asmArchPpc64 = asmArch{"ppc64", 8, 8, 8, true, "R1", true}
asmArchPpc64LE = asmArch{"ppc64le", 8, 8, 8, false, "R1", true}
arches = []*asmArch{
&asmArch386,
&asmArchArm,
&asmArchArm64,
&asmArchAmd64,
&asmArchAmd64p32,
&asmArchMips64,
&asmArchMips64LE,
&asmArchPpc64,
&asmArchPpc64LE,
}
)
var (
re = regexp.MustCompile
asmPlusBuild = re(`//\s+\+build\s+([^\n]+)`)
asmTEXT = re(`\bTEXT\b.*·([^\(]+)\(SB\)(?:\s*,\s*([0-9A-Z|+]+))?(?:\s*,\s*\$(-?[0-9]+)(?:-([0-9]+))?)?`)
asmDATA = re(`\b(DATA|GLOBL)\b`)
asmNamedFP = re(`([a-zA-Z0-9_\xFF-\x{10FFFF}]+)(?:\+([0-9]+))\(FP\)`)
asmUnnamedFP = re(`[^+\-0-9](([0-9]+)\(FP\))`)
asmSP = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`)
asmOpcode = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`)
ppc64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`)
)
func asmCheck(pkg *Package) {
if !vet("asmdecl") {
return
}
// No work if no assembly files.
if !pkg.hasFileWithSuffix(".s") {
return
}
// Gather declarations. knownFunc[name][arch] is func description.
knownFunc := make(map[string]map[string]*asmFunc)
for _, f := range pkg.files {
if f.file != nil {
for _, decl := range f.file.Decls {
if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil {
knownFunc[decl.Name.Name] = f.asmParseDecl(decl)
}
}
}
}
Files:
for _, f := range pkg.files {
if !strings.HasSuffix(f.name, ".s") {
continue
}
Println("Checking file", f.name)
// Determine architecture from file name if possible.
var arch string
var archDef *asmArch
for _, a := range arches {
if strings.HasSuffix(f.name, "_"+a.name+".s") {
arch = a.name
archDef = a
break
}
}
lines := strings.SplitAfter(string(f.content), "\n")
var (
fn *asmFunc
fnName string
localSize, argSize int
wroteSP bool
haveRetArg bool
retLine []int
)
flushRet := func() {
if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
v := fn.vars["ret"]
for _, line := range retLine {
f.Badf(token.NoPos, "%s:%d: [%s] %s: RET without writing to %d-byte ret+%d(FP)", f.name, line, arch, fnName, v.size, v.off)
}
}
retLine = nil
}
for lineno, line := range lines {
lineno++
badf := func(format string, args ...interface{}) {
f.Badf(token.NoPos, "%s:%d: [%s] %s: %s", f.name, lineno, arch, fnName, fmt.Sprintf(format, args...))
}
if arch == "" {
// Determine architecture from +build line if possible.
if m := asmPlusBuild.FindStringSubmatch(line); m != nil {
Fields:
for _, fld := range strings.Fields(m[1]) {
for _, a := range arches {
if a.name == fld {
arch = a.name
archDef = a
break Fields
}
}
}
}
}
if m := asmTEXT.FindStringSubmatch(line); m != nil {
flushRet()
if arch == "" {
f.Warnf(token.NoPos, "%s: cannot determine architecture for assembly file", f.name)
continue Files
}
fnName = m[1]
fn = knownFunc[m[1]][arch]
if fn != nil {
size, _ := strconv.Atoi(m[4])
if size != fn.size && (m[2] != "7" && !strings.Contains(m[2], "NOSPLIT") || size != 0) {
badf("wrong argument size %d; expected $...-%d", size, fn.size)
}
}
localSize, _ = strconv.Atoi(m[3])
localSize += archDef.intSize
if archDef.lr {
// Account for caller's saved LR
localSize += archDef.intSize
}
argSize, _ = strconv.Atoi(m[4])
if fn == nil && !strings.Contains(fnName, "<>") {
badf("function %s missing Go declaration", fnName)
}
wroteSP = false
haveRetArg = false
continue
} else if strings.Contains(line, "TEXT") && strings.Contains(line, "SB") {
// function, but not visible from Go (didn't match asmTEXT), so stop checking
flushRet()
fn = nil
fnName = ""
continue
}
if strings.Contains(line, "RET") {
retLine = append(retLine, lineno)
}
if fnName == "" {
continue
}
if asmDATA.FindStringSubmatch(line) != nil {
fn = nil
}
if archDef == nil {
continue
}
if strings.Contains(line, ", "+archDef.stack) || strings.Contains(line, ",\t"+archDef.stack) {
wroteSP = true
continue
}
for _, m := range asmSP.FindAllStringSubmatch(line, -1) {
if m[3] != archDef.stack || wroteSP {
continue
}
off := 0
if m[1] != "" {
off, _ = strconv.Atoi(m[2])
}
if off >= localSize {
if fn != nil {
v := fn.varByOffset[off-localSize]
if v != nil {
badf("%s should be %s+%d(FP)", m[1], v.name, off-localSize)
continue
}
}
if off >= localSize+argSize {
badf("use of %s points beyond argument frame", m[1])
continue
}
badf("use of %s to access argument frame", m[1])
}
}
if fn == nil {
continue
}
for _, m := range asmUnnamedFP.FindAllStringSubmatch(line, -1) {
off, _ := strconv.Atoi(m[2])
v := fn.varByOffset[off]
if v != nil {
badf("use of unnamed argument %s; offset %d is %s+%d(FP)", m[1], off, v.name, v.off)
} else {
badf("use of unnamed argument %s", m[1])
}
}
for _, m := range asmNamedFP.FindAllStringSubmatch(line, -1) {
name := m[1]
off := 0
if m[2] != "" {
off, _ = strconv.Atoi(m[2])
}
if name == "ret" || strings.HasPrefix(name, "ret_") {
haveRetArg = true
}
v := fn.vars[name]
if v == nil {
// Allow argframe+0(FP).
if name == "argframe" && off == 0 {
continue
}
v = fn.varByOffset[off]
if v != nil {
badf("unknown variable %s; offset %d is %s+%d(FP)", name, off, v.name, v.off)
} else {
badf("unknown variable %s", name)
}
continue
}
asmCheckVar(badf, fn, line, m[0], off, v)
}
}
flushRet()
}
}
// asmParseDecl parses a function decl for expected assembly variables.
func (f *File) asmParseDecl(decl *ast.FuncDecl) map[string]*asmFunc {
var (
arch *asmArch
fn *asmFunc
offset int
failed bool
)
// addVar records v in fn both by name and by every byte offset the
// variable covers; when outer is non-empty, v is also appended to that
// variable's list of inner component variables.
addVar := func(outer string, v asmVar) {
if vo := fn.vars[outer]; vo != nil {
vo.inner = append(vo.inner, &v)
}
fn.vars[v.name] = &v
for i := 0; i < v.size; i++ {
fn.varByOffset[v.off+i] = &v
}
}
// addParams lays out one parameter (or result) list at increasing frame
// offsets, creating an asmVar for each declared name plus synthetic
// component variables (_lo/_hi, _base/_len/_cap, ...) for composite kinds.
addParams := func(list []*ast.Field) {
for i, fld := range list {
// Determine alignment, size, and kind of type in declaration.
var align, size int
var kind asmKind
names := fld.Names
typ := f.gofmt(fld.Type)
switch t := fld.Type.(type) {
default:
// Not a syntactic composite: classify by the formatted type name.
switch typ {
default:
f.Warnf(fld.Type.Pos(), "unknown assembly argument type %s", typ)
failed = true
return
case "int8", "uint8", "byte", "bool":
size = 1
case "int16", "uint16":
size = 2
case "int32", "uint32", "float32":
size = 4
case "int64", "uint64", "float64":
align = arch.maxAlign
size = 8
case "int", "uint":
size = arch.intSize
case "uintptr", "iword", "Word", "Errno", "unsafe.Pointer":
size = arch.ptrSize
case "string", "ErrorString":
size = arch.ptrSize * 2
align = arch.ptrSize
kind = asmString
}
case *ast.ChanType, *ast.FuncType, *ast.MapType, *ast.StarExpr:
size = arch.ptrSize
case *ast.InterfaceType:
align = arch.ptrSize
size = 2 * arch.ptrSize
if len(t.Methods.List) > 0 {
kind = asmInterface
} else {
kind = asmEmptyInterface
}
case *ast.ArrayType:
// Only slices (no length) are supported; fixed arrays fail below.
if t.Len == nil {
size = arch.ptrSize + 2*arch.intSize
align = arch.ptrSize
kind = asmSlice
break
}
f.Warnf(fld.Type.Pos(), "unsupported assembly argument type %s", typ)
failed = true
case *ast.StructType:
f.Warnf(fld.Type.Pos(), "unsupported assembly argument type %s", typ)
failed = true
}
if align == 0 {
align = size
}
if kind == 0 {
kind = asmKind(size)
}
// Round the running offset up to this variable's alignment.
offset += -offset & (align - 1)
// Create variable for each name being declared with this type.
if len(names) == 0 {
name := "unnamed"
if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 && &list[0] == &decl.Type.Results.List[0] && i == 0 {
// Assume assembly will refer to single unnamed result as r.
name = "ret"
}
names = []*ast.Ident{{Name: name}}
}
for _, id := range names {
name := id.Name
addVar("", asmVar{
name: name,
kind: kind,
typ: typ,
off: offset,
size: size,
})
switch kind {
case 8:
// On 32-bit architectures an 8-byte value occupies two words,
// addressable in assembly as name_lo and name_hi.
if arch.ptrSize == 4 {
w1, w2 := "lo", "hi"
if arch.bigEndian {
w1, w2 = w2, w1
}
addVar(name, asmVar{
name: name + "_" + w1,
kind: 4,
typ: "half " + typ,
off: offset,
size: 4,
})
addVar(name, asmVar{
name: name + "_" + w2,
kind: 4,
typ: "half " + typ,
off: offset + 4,
size: 4,
})
}
case asmEmptyInterface:
addVar(name, asmVar{
name: name + "_type",
kind: asmKind(arch.ptrSize),
typ: "interface type",
off: offset,
size: arch.ptrSize,
})
addVar(name, asmVar{
name: name + "_data",
kind: asmKind(arch.ptrSize),
typ: "interface data",
off: offset + arch.ptrSize,
size: arch.ptrSize,
})
case asmInterface:
addVar(name, asmVar{
name: name + "_itable",
kind: asmKind(arch.ptrSize),
typ: "interface itable",
off: offset,
size: arch.ptrSize,
})
addVar(name, asmVar{
name: name + "_data",
kind: asmKind(arch.ptrSize),
typ: "interface data",
off: offset + arch.ptrSize,
size: arch.ptrSize,
})
case asmSlice:
addVar(name, asmVar{
name: name + "_base",
kind: asmKind(arch.ptrSize),
typ: "slice base",
off: offset,
size: arch.ptrSize,
})
addVar(name, asmVar{
name: name + "_len",
kind: asmKind(arch.intSize),
typ: "slice len",
off: offset + arch.ptrSize,
size: arch.intSize,
})
addVar(name, asmVar{
name: name + "_cap",
kind: asmKind(arch.intSize),
typ: "slice cap",
off: offset + arch.ptrSize + arch.intSize,
size: arch.intSize,
})
case asmString:
addVar(name, asmVar{
name: name + "_base",
kind: asmKind(arch.ptrSize),
typ: "string base",
off: offset,
size: arch.ptrSize,
})
addVar(name, asmVar{
name: name + "_len",
kind: asmKind(arch.intSize),
typ: "string len",
off: offset + arch.ptrSize,
size: arch.intSize,
})
}
offset += size
}
}
}
// Lay out the frame once per architecture (sizes and alignment differ);
// the result maps architecture name to its expected frame description.
m := make(map[string]*asmFunc)
for _, arch = range arches {
fn = &asmFunc{
arch: arch,
vars: make(map[string]*asmVar),
varByOffset: make(map[int]*asmVar),
}
offset = 0
addParams(decl.Type.Params.List)
if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 {
// Results start at the next maxAlign boundary after the parameters.
offset += -offset & (arch.maxAlign - 1)
addParams(decl.Type.Results.List)
}
fn.size = offset
m[arch.name] = fn
}
// Any unsupported parameter type invalidates the whole declaration.
if failed {
return nil
}
return m
}
// asmCheckVar checks a single variable reference.
func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar) {
m := asmOpcode.FindStringSubmatch(line)
if m == nil {
// Comment-only lines are allowed; anything else should have an opcode.
if !strings.HasPrefix(strings.TrimSpace(line), "//") {
badf("cannot find assembly opcode")
}
return
}
// Determine operand sizes from instruction.
// Typically the suffix suffices, but there are exceptions.
var src, dst, kind asmKind
op := m[1]
switch fn.arch.name + "." + op {
case "386.FMOVLP":
src, dst = 8, 4
case "arm.MOVD":
src = 8
case "arm.MOVW":
src = 4
case "arm.MOVH", "arm.MOVHU":
src = 2
case "arm.MOVB", "arm.MOVBU":
src = 1
// LEA* opcodes don't really read the second arg.
// They just take the address of it.
case "386.LEAL":
dst = 4
case "amd64.LEAQ":
dst = 8
case "amd64p32.LEAL":
dst = 4
default:
switch fn.arch.name {
case "386", "amd64":
if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "D") || strings.HasSuffix(op, "DP")) {
// FMOVDP, FXCHD, etc
src = 8
break
}
if strings.HasPrefix(op, "P") && strings.HasSuffix(op, "RD") {
// PINSRD, PEXTRD, etc
src = 4
break
}
if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "F") || strings.HasSuffix(op, "FP")) {
// FMOVFP, FXCHF, etc
src = 4
break
}
if strings.HasSuffix(op, "SD") {
// MOVSD, SQRTSD, etc
src = 8
break
}
if strings.HasSuffix(op, "SS") {
// MOVSS, SQRTSS, etc
src = 4
break
}
if strings.HasPrefix(op, "SET") {
// SETEQ, etc
src = 1
break
}
// Fall back to the standard x86 size-suffix letter.
switch op[len(op)-1] {
case 'B':
src = 1
case 'W':
src = 2
case 'L':
src = 4
case 'D', 'Q':
src = 8
}
case "ppc64", "ppc64le":
// Strip standard suffixes to reveal size letter.
m := ppc64Suff.FindStringSubmatch(op)
if m != nil {
switch m[1][0] {
case 'B':
src = 1
case 'H':
src = 2
case 'W':
src = 4
case 'D':
src = 8
}
}
case "mips64", "mips64le":
switch op {
case "MOVB", "MOVBU":
src = 1
case "MOVH", "MOVHU":
src = 2
case "MOVW", "MOVWU", "MOVF":
src = 4
case "MOVV", "MOVD":
src = 8
}
}
}
// Unless stated otherwise, the destination width matches the source width.
if dst == 0 {
dst = src
}
// Determine whether the match we're holding
// is the first or second argument.
if strings.Index(line, expr) > strings.Index(line, ",") {
kind = dst
} else {
kind = src
}
vk := v.kind
vt := v.typ
switch vk {
case asmInterface, asmEmptyInterface, asmString, asmSlice:
// allow reference to first word (pointer)
vk = v.inner[0].kind
vt = v.inner[0].typ
}
// Wrong frame offset: report, listing the valid component offsets.
if off != v.off {
var inner bytes.Buffer
for i, vi := range v.inner {
if len(v.inner) > 1 {
fmt.Fprintf(&inner, ",")
}
fmt.Fprintf(&inner, " ")
if i == len(v.inner)-1 {
fmt.Fprintf(&inner, "or ")
}
fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
}
badf("invalid offset %s; expected %s+%d(FP)%s", expr, v.name, v.off, inner.String())
return
}
// Width mismatch between the instruction and the variable's kind.
if kind != 0 && kind != vk {
var inner bytes.Buffer
if len(v.inner) > 0 {
fmt.Fprintf(&inner, " containing")
for i, vi := range v.inner {
if i > 0 && len(v.inner) > 2 {
fmt.Fprintf(&inner, ",")
}
fmt.Fprintf(&inner, " ")
if i > 0 && i == len(v.inner)-1 {
fmt.Fprintf(&inner, "and ")
}
fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
}
}
badf("invalid %s of %s; %s is %d-byte value%s", op, expr, vt, vk, inner.String())
}
}

49
tools/vendor/github.com/dnephin/govet/assign.go generated vendored Normal file
View File

@@ -0,0 +1,49 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for useless assignments.
*/
package main
import (
"go/ast"
"go/token"
"reflect"
)
// init wires the "assign" check into the vet registry; it runs on every
// assignment statement.
func init() {
	register(
		"assign",
		"check for useless assignments",
		checkAssignStmt,
		assignStmt,
	)
}
// TODO: should also check for assignments to struct fields inside methods
// that are on T instead of *T.

// checkAssignStmt flags statements of the form "<expr> = <expr>" where both
// sides format to identical source text: such self-assignments are almost
// always useless, and usually a mistake.
func checkAssignStmt(f *File, node ast.Node) {
	assign := node.(*ast.AssignStmt)
	// Only plain '=' is interesting; ':=' defines fresh names.
	if assign.Tok != token.ASSIGN {
		return
	}
	// Mismatched cardinality (e.g. x, y = f()) can never be a self-assignment.
	if len(assign.Lhs) != len(assign.Rhs) {
		return
	}
	for idx, left := range assign.Lhs {
		right := assign.Rhs[idx]
		// Cheap pre-filter: different AST node types cannot print the same.
		if reflect.TypeOf(left) != reflect.TypeOf(right) {
			continue
		}
		leftSrc := f.gofmt(left)
		rightSrc := f.gofmt(right)
		if leftSrc == rightSrc {
			f.Badf(assign.Pos(), "self-assignment of %s to %s", rightSrc, leftSrc)
		}
	}
}

69
tools/vendor/github.com/dnephin/govet/atomic.go generated vendored Normal file
View File

@@ -0,0 +1,69 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"go/ast"
"go/token"
)
// init registers the "atomic" check to run on assignment statements.
func init() {
	register(
		"atomic",
		"check for common mistaken usages of the sync/atomic package",
		checkAtomicAssignment,
		assignStmt,
	)
}
// checkAtomicAssignment walks an assignment statement looking for the common
// misuse of the atomic package, such as: x = atomic.AddUint64(&x, 1)
func checkAtomicAssignment(f *File, node ast.Node) {
	stmt := node.(*ast.AssignStmt)
	if len(stmt.Lhs) != len(stmt.Rhs) {
		return
	}
	// A single ':=' definition creates a fresh variable, so it cannot
	// clobber the operand of the atomic call.
	if stmt.Tok == token.DEFINE && len(stmt.Lhs) == 1 {
		return
	}
	for idx, rhs := range stmt.Rhs {
		call, isCall := rhs.(*ast.CallExpr)
		if !isCall {
			continue
		}
		sel, isSel := call.Fun.(*ast.SelectorExpr)
		if !isSel {
			continue
		}
		// Only calls spelled atomic.<Something> are considered.
		pkgIdent, isIdent := sel.X.(*ast.Ident)
		if !isIdent || pkgIdent.Name != "atomic" {
			continue
		}
		switch sel.Sel.Name {
		case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
			f.checkAtomicAddAssignment(stmt.Lhs[idx], call)
		}
	}
}
// checkAtomicAddAssignment reports an atomic.Add* call whose return value is
// assigned to the same variable that the call operates on, which races with
// the atomic update itself.
func (f *File) checkAtomicAddAssignment(left ast.Expr, call *ast.CallExpr) {
	if len(call.Args) != 2 {
		return
	}
	firstArg := call.Args[0]
	selfAssigned := false
	if addr, ok := firstArg.(*ast.UnaryExpr); ok && addr.Op == token.AND {
		// Shape: x = atomic.AddUint64(&x, 1)
		selfAssigned = f.gofmt(left) == f.gofmt(addr.X)
	} else if deref, ok := left.(*ast.StarExpr); ok {
		// Shape: *p = atomic.AddUint64(p, 1)
		selfAssigned = f.gofmt(deref.X) == f.gofmt(firstArg)
	}
	if selfAssigned {
		f.Bad(left.Pos(), "direct assignment to atomic value")
	}
}

186
tools/vendor/github.com/dnephin/govet/bool.go generated vendored Normal file
View File

@@ -0,0 +1,186 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains boolean condition tests.
package main
import (
"go/ast"
"go/token"
)
// init registers the "bool" check to run on binary expressions.
func init() {
	register(
		"bool",
		"check for mistakes involving boolean operators",
		checkBool,
		binaryExpr,
	)
}
// checkBool examines one binary expression; for && and || it runs the
// redundancy and suspect-comparison checks over each commutative group of
// side-effect-free operands.
func checkBool(f *File, n ast.Node) {
	expr := n.(*ast.BinaryExpr)
	var op boolOp
	switch expr.Op {
	case token.LOR:
		op = or
	case token.LAND:
		op = and
	default:
		// Not a boolean operator; nothing to check.
		return
	}
	for _, group := range op.commutativeSets(expr) {
		op.checkRedundant(f, group)
		op.checkSuspect(f, group)
	}
}
// boolOp describes a binary boolean operator (&& or ||) together with the
// equality comparison whose repeated use under that operator is suspect.
type boolOp struct {
name string
tok token.Token // token corresponding to this operator
badEq token.Token // token corresponding to the equality test that should not be used with this operator
}
// or and and are the two checked operator instances: 'x != c1 || x != c2'
// and 'x == c1 && x == c2' are the suspicious combinations.
var (
or = boolOp{"or", token.LOR, token.NEQ}
and = boolOp{"and", token.LAND, token.EQL}
)
// commutativeSets returns all side effect free sets of
// expressions in e that are connected by op.
// For example, given 'a || b || f() || c || d' with the or op,
// commutativeSets returns {{b, a}, {d, c}}.
func (op boolOp) commutativeSets(e *ast.BinaryExpr) [][]ast.Expr {
	exprs := op.split(e)
	var sets [][]ast.Expr
	// Expressions with side effects act as barriers: operands on opposite
	// sides of a barrier cannot be considered interchangeable.
	start := 0
	for end := 0; end <= len(exprs); end++ {
		if end == len(exprs) || hasSideEffects(exprs[end]) {
			if start < end {
				sets = append(sets, exprs[start:end])
			}
			start = end + 1
		}
	}
	return sets
}
// checkRedundant reports duplicated operands of the form
//	e && e
//	e || e
// exprs must contain only side effect free expressions.
func (op boolOp) checkRedundant(f *File, exprs []ast.Expr) {
	seen := map[string]bool{}
	for _, expr := range exprs {
		text := f.gofmt(expr)
		if !seen[text] {
			seen[text] = true
			continue
		}
		f.Badf(expr.Pos(), "redundant %s: %s %s %s", op.name, text, op.tok, text)
	}
}
// checkSuspect reports expressions of the form
//	x != c1 || x != c2
//	x == c1 && x == c2
// where c1 and c2 are constant expressions. If c1 and c2 are the same the
// expression is redundant (handled by checkRedundant); if different, the
// whole expression is constant (always true for ||, always false for &&).
// exprs must contain only side effect free expressions.
func (op boolOp) checkSuspect(f *File, exprs []ast.Expr) {
	// seen maps the formatted non-constant operand 'x' to the first
	// equality expression ('x != c' or 'x == c') it appeared in.
	seen := make(map[string]string)
	for _, e := range exprs {
		bin, isBin := e.(*ast.BinaryExpr)
		if !isBin || bin.Op != op.badEq {
			continue
		}
		// To avoid false positives, require one operand to be constant and
		// track the other. When both are constant (e.g. runtime.GOOS and
		// "windows") only a repeated LHS is caught — the common style.
		var x ast.Expr
		switch {
		case f.pkg.types[bin.Y].Value != nil:
			x = bin.X
		case f.pkg.types[bin.X].Value != nil:
			x = bin.Y
		default:
			continue
		}
		xfmt := f.gofmt(x)
		efmt := f.gofmt(e)
		prev, found := seen[xfmt]
		if !found {
			seen[xfmt] = efmt
			continue
		}
		// checkRedundant already reports the efmt == prev case.
		if efmt != prev {
			f.Badf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
		}
	}
}
// hasSideEffects reports whether evaluation of e has side effects.
func hasSideEffects(e ast.Expr) bool {
safe := true
ast.Inspect(e, func(node ast.Node) bool {
switch n := node.(type) {
// Using CallExpr here will catch conversions
// as well as function and method invocations.
// We'll live with the false negatives for now.
case *ast.CallExpr:
safe = false
return false
case *ast.UnaryExpr:
if n.Op == token.ARROW {
safe = false
return false
}
}
return true
})
return !safe
}
// split returns a slice of all subexpressions in e that are connected by op.
// For example, given 'a || (b || c) || d' with the or op,
// split returns []{d, c, b, a}.
func (op boolOp) split(e ast.Expr) (exprs []ast.Expr) {
	for {
		e = unparen(e)
		bin, isBin := e.(*ast.BinaryExpr)
		if !isBin || bin.Op != op.tok {
			exprs = append(exprs, e)
			return
		}
		// Right subtree first, then continue down the left spine — this
		// yields the reversed order documented above.
		exprs = append(exprs, op.split(bin.Y)...)
		e = bin.X
	}
}
// unparen returns e with any enclosing parentheses stripped.
func unparen(e ast.Expr) ast.Expr {
for {
p, ok := e.(*ast.ParenExpr)
if !ok {
return e
}
e = p.X
}
}

91
tools/vendor/github.com/dnephin/govet/buildtag.go generated vendored Normal file
View File

@@ -0,0 +1,91 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"os"
"strings"
"unicode"
)
// Byte-slice constants used when scanning source lines for +build comments.
var (
nl = []byte("\n")
slashSlash = []byte("//")
plusBuild = []byte("+build")
)
// checkBuildTag checks that build tags are in the correct location and well-formed.
func checkBuildTag(name string, data []byte) {
if !vet("buildtags") {
return
}
lines := bytes.SplitAfter(data, nl)
// Determine cutpoint where +build comments are no longer valid.
// They are valid in leading // comments in the file followed by
// a blank line.
var cutoff int
for i, line := range lines {
line = bytes.TrimSpace(line)
if len(line) == 0 {
// Remember the last blank line in the leading comment block.
cutoff = i
continue
}
if bytes.HasPrefix(line, slashSlash) {
continue
}
break
}
for i, line := range lines {
line = bytes.TrimSpace(line)
if !bytes.HasPrefix(line, slashSlash) {
continue
}
text := bytes.TrimSpace(line[2:])
if bytes.HasPrefix(text, plusBuild) {
fields := bytes.Fields(text)
if !bytes.Equal(fields[0], plusBuild) {
// Comment is something like +buildasdf not +build.
fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
continue
}
if i >= cutoff {
// +build comment appears after the valid leading-comment region.
fmt.Fprintf(os.Stderr, "%s:%d: +build comment must appear before package clause and be followed by a blank line\n", name, i+1)
setExit(1)
continue
}
// Check arguments.
Args:
for _, arg := range fields[1:] {
for _, elem := range strings.Split(string(arg), ",") {
if strings.HasPrefix(elem, "!!") {
fmt.Fprintf(os.Stderr, "%s:%d: invalid double negative in build constraint: %s\n", name, i+1, arg)
setExit(1)
break Args
}
if strings.HasPrefix(elem, "!") {
elem = elem[1:]
}
// Constraint words may contain only letters, digits, '_' and '.'.
for _, c := range elem {
if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
fmt.Fprintf(os.Stderr, "%s:%d: invalid non-alphanumeric build constraint: %s\n", name, i+1, arg)
setExit(1)
break Args
}
}
}
}
continue
}
// Comment with +build but not at beginning.
if bytes.Contains(line, plusBuild) && i < cutoff {
fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
continue
}
}
}

130
tools/vendor/github.com/dnephin/govet/cgo.go generated vendored Normal file
View File

@@ -0,0 +1,130 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Check for invalid cgo pointer passing.
// This looks for code that uses cgo to call C code passing values
// whose types are almost always invalid according to the cgo pointer
// sharing rules.
// Specifically, it warns about attempts to pass a Go chan, map, func,
// or slice to C, either directly, or via a pointer, array, or struct.
package main
import (
"go/ast"
"go/token"
"go/types"
)
// init registers the "cgocall" check to run on call expressions.
func init() {
	register(
		"cgocall",
		"check for types that may not be passed to cgo calls",
		checkCgoCall,
		callExpr,
	)
}
// checkCgoCall inspects one call expression; for calls to functions imported
// from the "C" pseudo-package it flags arguments whose types embed Go
// pointers, which must not cross into C.
func checkCgoCall(f *File, node ast.Node) {
	call := node.(*ast.CallExpr)
	// Only calls spelled C.f(...) are cgo calls.
	sel, isSel := call.Fun.(*ast.SelectorExpr)
	if !isSel {
		return
	}
	pkgIdent, isIdent := sel.X.(*ast.Ident)
	if !isIdent || pkgIdent.Name != "C" {
		return
	}
	for _, arg := range call.Args {
		if !typeOKForCgoCall(cgoBaseType(f, arg)) {
			f.Badf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
		}
		// Strip a single unsafe.Pointer conversion so that the address
		// taken inside it is checked as well.
		if conv, isConv := arg.(*ast.CallExpr); isConv && len(conv.Args) == 1 && f.hasBasicType(conv.Fun, types.UnsafePointer) {
			arg = conv.Args[0]
		}
		if addr, isAddr := arg.(*ast.UnaryExpr); isAddr && addr.Op == token.AND {
			if !typeOKForCgoCall(cgoBaseType(f, addr.X)) {
				f.Badf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
			}
		}
	}
}
// cgoBaseType tries to look through type conversions involving
// unsafe.Pointer to find the real type. It converts:
// unsafe.Pointer(x) => x
// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
func cgoBaseType(f *File, arg ast.Expr) types.Type {
switch arg := arg.(type) {
case *ast.CallExpr:
// Direct conversion: unsafe.Pointer(x) => base type of x.
if len(arg.Args) == 1 && f.hasBasicType(arg.Fun, types.UnsafePointer) {
return cgoBaseType(f, arg.Args[0])
}
case *ast.StarExpr:
// Peel the layers of *(*unsafe.Pointer)(unsafe.Pointer(&x)); each
// failed match breaks out and falls through to the plain type below.
call, ok := arg.X.(*ast.CallExpr)
if !ok || len(call.Args) != 1 {
break
}
// Here arg is *f(v).
t := f.pkg.types[call.Fun].Type
if t == nil {
break
}
ptr, ok := t.Underlying().(*types.Pointer)
if !ok {
break
}
// Here arg is *(*p)(v)
elem, ok := ptr.Elem().Underlying().(*types.Basic)
if !ok || elem.Kind() != types.UnsafePointer {
break
}
// Here arg is *(*unsafe.Pointer)(v)
call, ok = call.Args[0].(*ast.CallExpr)
if !ok || len(call.Args) != 1 {
break
}
// Here arg is *(*unsafe.Pointer)(f(v))
if !f.hasBasicType(call.Fun, types.UnsafePointer) {
break
}
// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v))
u, ok := call.Args[0].(*ast.UnaryExpr)
if !ok || u.Op != token.AND {
break
}
// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v))
return cgoBaseType(f, u.X)
}
return f.pkg.types[arg].Type
}
// typeOKForCgoCall returns true if the type of arg is OK to pass to a
// C function using cgo. This is not true for Go types with embedded
// pointers.
func typeOKForCgoCall(t types.Type) bool {
if t == nil {
return true
}
switch t := t.Underlying().(type) {
case *types.Chan, *types.Map, *types.Signature, *types.Slice:
return false
case *types.Pointer:
return typeOKForCgoCall(t.Elem())
case *types.Array:
return typeOKForCgoCall(t.Elem())
case *types.Struct:
for i := 0; i < t.NumFields(); i++ {
if !typeOKForCgoCall(t.Field(i).Type()) {
return false
}
}
}
return true
}

82
tools/vendor/github.com/dnephin/govet/composite.go generated vendored Normal file
View File

@@ -0,0 +1,82 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the test for unkeyed struct literals.
package main
import (
"github.com/dnephin/govet/internal/whitelist"
"flag"
"go/ast"
"go/types"
"strings"
)
// compositeWhiteList controls use of the whitelist of types that are
// conventionally written with unkeyed fields; disabled only by tests.
var compositeWhiteList = flag.Bool("compositewhitelist", true, "use composite white list; for testing only")
// init registers the "composites" check to run on composite literals.
func init() {
register("composites",
"check that composite literals used field-keyed elements",
checkUnkeyedLiteral,
compositeLit)
}
// checkUnkeyedLiteral reports a struct composite literal of a non-local,
// non-whitelisted type that contains at least one unkeyed element.
func checkUnkeyedLiteral(f *File, node ast.Node) {
	cl := node.(*ast.CompositeLit)
	typ := f.pkg.types[cl].Type
	if typ == nil {
		// cannot determine composite literals' type, skip it
		return
	}
	typeName := typ.String()
	if *compositeWhiteList && whitelist.UnkeyedLiteral[typeName] {
		// Whitelisted types are conventionally written unkeyed.
		return
	}
	if _, isStruct := typ.Underlying().(*types.Struct); !isStruct {
		// Only struct literals are checked.
		return
	}
	if isLocalType(f, typeName) {
		// Unkeyed literals of locally defined types are allowed.
		return
	}
	// Report once if any element lacks a key.
	for _, elt := range cl.Elts {
		if _, keyed := elt.(*ast.KeyValueExpr); !keyed {
			f.Badf(cl.Pos(), "%s composite literal uses unkeyed fields", typeName)
			return
		}
	}
}
// isLocalType reports whether typeName denotes a type that is local to the
// package being checked (including anonymous structs, and — inside a _test
// package — types from the package under test).
func isLocalType(f *File, typeName string) bool {
	if strings.HasPrefix(typeName, "struct{") {
		// Anonymous struct literals count as local.
		return true
	}
	pkgname := f.pkg.path
	if strings.HasPrefix(typeName, pkgname+".") {
		return true
	}
	// Inside foo_test, types from package foo are treated as local too.
	if strings.HasSuffix(pkgname, "_test") {
		pkgname = strings.TrimSuffix(pkgname, "_test")
	}
	return strings.HasPrefix(typeName, pkgname+".")
}

239
tools/vendor/github.com/dnephin/govet/copylock.go generated vendored Normal file
View File

@@ -0,0 +1,239 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the code to check that locks are not passed by value.
package main
import (
"bytes"
"fmt"
"go/ast"
"go/token"
"go/types"
)
// init registers the "copylocks" check over every node kind that can copy a
// value: declarations, assignments, calls, literals, ranges and returns.
func init() {
	register("copylocks", "check that locks are not passed by value",
		checkCopyLocks,
		funcDecl, rangeStmt, funcLit, callExpr, assignStmt, genDecl, compositeLit, returnStmt)
}
// checkCopyLocks checks whether node might
// inadvertently copy a lock. It dispatches to the specialized checker for
// each syntactic form that can copy a value.
func checkCopyLocks(f *File, node ast.Node) {
switch node := node.(type) {
case *ast.RangeStmt:
checkCopyLocksRange(f, node)
case *ast.FuncDecl:
checkCopyLocksFunc(f, node.Name.Name, node.Recv, node.Type)
case *ast.FuncLit:
// Function literals have no name or receiver.
checkCopyLocksFunc(f, "func", nil, node.Type)
case *ast.CallExpr:
checkCopyLocksCallExpr(f, node)
case *ast.AssignStmt:
checkCopyLocksAssign(f, node)
case *ast.GenDecl:
checkCopyLocksGenDecl(f, node)
case *ast.CompositeLit:
checkCopyLocksCompositeLit(f, node)
case *ast.ReturnStmt:
checkCopyLocksReturnStmt(f, node)
}
}
// checkCopyLocksAssign reports right-hand sides of an assignment that copy a
// lock-containing value.
func checkCopyLocksAssign(f *File, as *ast.AssignStmt) {
	for idx, rhs := range as.Rhs {
		path := lockPathRhs(f, rhs)
		if path == nil {
			continue
		}
		f.Badf(rhs.Pos(), "assignment copies lock value to %v: %v", f.gofmt(as.Lhs[idx]), path)
	}
}
// checkCopyLocksGenDecl reports initializers in a var declaration that copy a
// lock-containing value.
func checkCopyLocksGenDecl(f *File, gd *ast.GenDecl) {
	// Only var declarations initialize storage; const/type/import cannot copy.
	if gd.Tok != token.VAR {
		return
	}
	for _, spec := range gd.Specs {
		vs := spec.(*ast.ValueSpec)
		for idx, value := range vs.Values {
			path := lockPathRhs(f, value)
			if path == nil {
				continue
			}
			f.Badf(value.Pos(), "variable declaration copies lock value to %v: %v", vs.Names[idx].Name, path)
		}
	}
}
// checkCopyLocksCompositeLit detects a lock copied into a composite literal
// element (keyed or positional).
func checkCopyLocksCompositeLit(f *File, cl *ast.CompositeLit) {
	for _, elt := range cl.Elts {
		// For keyed elements, the copied expression is the value part.
		if kv, isKV := elt.(*ast.KeyValueExpr); isKV {
			elt = kv.Value
		}
		if path := lockPathRhs(f, elt); path != nil {
			f.Badf(elt.Pos(), "literal copies lock value from %v: %v", f.gofmt(elt), path)
		}
	}
}
// checkCopyLocksReturnStmt detects a lock copied by a return statement's
// result expressions.
func checkCopyLocksReturnStmt(f *File, rs *ast.ReturnStmt) {
	for _, result := range rs.Results {
		if path := lockPathRhs(f, result); path != nil {
			f.Badf(result.Pos(), "return copies lock value: %v", path)
		}
	}
}
// checkCopyLocksCallExpr detects a lock copied in the arguments of a
// function call.
func checkCopyLocksCallExpr(f *File, ce *ast.CallExpr) {
	// The built-in new(T) allocates; it never copies an existing value.
	if ident, isIdent := ce.Fun.(*ast.Ident); isIdent && ident.Name == "new" && f.pkg.types[ident].IsBuiltin() {
		return
	}
	for _, arg := range ce.Args {
		if path := lockPathRhs(f, arg); path != nil {
			f.Badf(arg.Pos(), "function call copies lock value: %v", path)
		}
	}
}
// checkCopyLocksFunc reports a function whose receiver or parameters take a
// lock-containing value by value.
func checkCopyLocksFunc(f *File, name string, recv *ast.FieldList, typ *ast.FuncType) {
	if recv != nil && len(recv.List) > 0 {
		recvType := recv.List[0].Type
		if path := lockPath(f.pkg.typesPkg, f.pkg.types[recvType].Type); path != nil {
			f.Badf(recvType.Pos(), "%s passes lock by value: %v", name, path)
		}
	}
	if typ.Params != nil {
		for _, field := range typ.Params.List {
			if path := lockPath(f.pkg.typesPkg, f.pkg.types[field.Type].Type); path != nil {
				f.Badf(field.Type.Pos(), "%s passes lock by value: %v", name, path)
			}
		}
	}
	// Results are deliberately not checked: 'return T{}' returns a zero
	// value, which is fine; actual copies are caught at return statements.
}
// checkCopyLocksRange reports range statements whose key or value variable
// copies a lock-containing value on each iteration.
func checkCopyLocksRange(f *File, r *ast.RangeStmt) {
	for _, rangeVar := range []ast.Expr{r.Key, r.Value} {
		checkCopyLocksRangeVar(f, r.Tok, rangeVar)
	}
}
// checkCopyLocksRangeVar checks one range variable (key or value); rtok is
// the range statement's token (= or :=). Blank and nil variables are ignored.
func checkCopyLocksRangeVar(f *File, rtok token.Token, e ast.Expr) {
	if e == nil {
		return
	}
	ident, isIdent := e.(*ast.Ident)
	if isIdent && ident.Name == "_" {
		return
	}
	var typ types.Type
	if rtok == token.DEFINE {
		// ':=' introduces a new object; resolve its type through defs.
		if !isIdent {
			return
		}
		obj := f.pkg.defs[ident]
		if obj == nil {
			return
		}
		typ = obj.Type()
	} else {
		typ = f.pkg.types[e].Type
	}
	if typ == nil {
		return
	}
	if path := lockPath(f.pkg.typesPkg, typ); path != nil {
		f.Badf(e.Pos(), "range var %s copies lock: %v", f.gofmt(e), path)
	}
}
type typePath []types.Type
// String pretty-prints a typePath.
func (path typePath) String() string {
n := len(path)
var buf bytes.Buffer
for i := range path {
if i > 0 {
fmt.Fprint(&buf, " contains ")
}
// The human-readable path is in reverse order, outermost to innermost.
fmt.Fprint(&buf, path[n-i-1].String())
}
return buf.String()
}
// lockPathRhs decides whether a right-hand-side expression can actually copy
// a lock: composite literals and calls (directly or behind a single deref)
// produce fresh/zero values and are exempt; everything else is checked via
// lockPath on its static type.
func lockPathRhs(f *File, x ast.Expr) typePath {
	switch v := x.(type) {
	case *ast.CompositeLit:
		// A literal constructs a new value in place.
		return nil
	case *ast.CallExpr:
		// A call may return a zero value.
		return nil
	case *ast.StarExpr:
		if _, isCall := v.X.(*ast.CallExpr); isCall {
			// A call may return a pointer to a zero value.
			return nil
		}
	}
	return lockPath(f.pkg.typesPkg, f.pkg.types[x].Type)
}
// lockPath returns a typePath describing the location of a lock value
// contained in typ. If there is no contained lock, it returns nil.
func lockPath(tpkg *types.Package, typ types.Type) typePath {
	if typ == nil {
		return nil
	}
	// Only struct underlying types can embed a lock by value; interfaces
	// and pointers are safe to copy.
	styp, isStruct := typ.Underlying().(*types.Struct)
	if !isStruct {
		return nil
	}
	// A type whose *pointer* method set has Lock but whose value method set
	// does not is lock-like (e.g. sync.Mutex): a reference can be locked
	// but a copied value cannot.
	if plock := types.NewMethodSet(types.NewPointer(typ)).Lookup(tpkg, "Lock"); plock != nil {
		if types.NewMethodSet(typ).Lookup(tpkg, "Lock") == nil {
			return []types.Type{typ}
		}
	}
	// Otherwise recurse into the fields; the first lock found wins, with
	// the enclosing type appended (innermost-first ordering).
	for i := 0; i < styp.NumFields(); i++ {
		if subpath := lockPath(tpkg, styp.Field(i).Type()); subpath != nil {
			return append(subpath, typ)
		}
	}
	return nil
}

298
tools/vendor/github.com/dnephin/govet/deadcode.go generated vendored Normal file
View File

@@ -0,0 +1,298 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Check for syntactically unreachable code.
package main
import (
"go/ast"
"go/token"
)
// init registers the "unreachable" check over function declarations and
// function literals.
func init() {
	register(
		"unreachable",
		"check for unreachable code",
		checkUnreachable,
		funcDecl, funcLit,
	)
}
// deadState holds the per-function state for the unreachable-code walk.
type deadState struct {
f *File
hasBreak map[ast.Stmt]bool // statements that are the target of some break
hasGoto map[string]bool // label names that are the target of some goto
labels map[string]ast.Stmt // label name -> the statement it labels
breakTarget ast.Stmt // innermost statement an unlabeled break would exit
reachable bool // whether the statement currently being visited is reachable
}
// checkUnreachable checks a function body for dead code.
//
// TODO(adonovan): use the new cfg package, which is more precise.
func checkUnreachable(f *File, node ast.Node) {
	var body *ast.BlockStmt
	switch fn := node.(type) {
	case *ast.FuncDecl:
		body = fn.Body
	case *ast.FuncLit:
		body = fn.Body
	}
	// Declarations without bodies (e.g. assembly stubs) have nothing to check.
	if body == nil {
		return
	}
	walker := &deadState{
		f:        f,
		hasBreak: make(map[ast.Stmt]bool),
		hasGoto:  make(map[string]bool),
		labels:   make(map[string]ast.Stmt),
	}
	// First pass gathers labels/breaks; second pass finds dead statements.
	walker.findLabels(body)
	walker.reachable = true
	walker.findDead(body)
}
// findLabels gathers information about the labels defined and used by stmt
// and about which statements break, whether a label is involved or not.
func (d *deadState) findLabels(stmt ast.Stmt) {
switch x := stmt.(type) {
default:
d.f.Warnf(x.Pos(), "internal error in findLabels: unexpected statement %T", x)
case *ast.AssignStmt,
*ast.BadStmt,
*ast.DeclStmt,
*ast.DeferStmt,
*ast.EmptyStmt,
*ast.ExprStmt,
*ast.GoStmt,
*ast.IncDecStmt,
*ast.ReturnStmt,
*ast.SendStmt:
// no statements inside
case *ast.BlockStmt:
for _, stmt := range x.List {
d.findLabels(stmt)
}
case *ast.BranchStmt:
switch x.Tok {
case token.GOTO:
if x.Label != nil {
d.hasGoto[x.Label.Name] = true
}
case token.BREAK:
// An unlabeled break exits the innermost enclosing
// for/range/switch/select; a labeled break exits that label's statement.
stmt := d.breakTarget
if x.Label != nil {
stmt = d.labels[x.Label.Name]
}
if stmt != nil {
d.hasBreak[stmt] = true
}
}
case *ast.IfStmt:
d.findLabels(x.Body)
if x.Else != nil {
d.findLabels(x.Else)
}
case *ast.LabeledStmt:
d.labels[x.Label.Name] = x.Stmt
d.findLabels(x.Stmt)
// These cases are all the same, but the x.Body only works
// when the specific type of x is known, so the cases cannot
// be merged.
case *ast.ForStmt:
outer := d.breakTarget
d.breakTarget = x
d.findLabels(x.Body)
d.breakTarget = outer
case *ast.RangeStmt:
outer := d.breakTarget
d.breakTarget = x
d.findLabels(x.Body)
d.breakTarget = outer
case *ast.SelectStmt:
outer := d.breakTarget
d.breakTarget = x
d.findLabels(x.Body)
d.breakTarget = outer
case *ast.SwitchStmt:
outer := d.breakTarget
d.breakTarget = x
d.findLabels(x.Body)
d.breakTarget = outer
case *ast.TypeSwitchStmt:
outer := d.breakTarget
d.breakTarget = x
d.findLabels(x.Body)
d.breakTarget = outer
case *ast.CommClause:
for _, stmt := range x.Body {
d.findLabels(stmt)
}
case *ast.CaseClause:
for _, stmt := range x.Body {
d.findLabels(stmt)
}
}
}
// findDead walks the statement looking for dead code.
// If d.reachable is false on entry, stmt itself is dead.
// When findDead returns, d.reachable tells whether the
// statement following stmt is reachable.
func (d *deadState) findDead(stmt ast.Stmt) {
// Is this a labeled goto target?
// If so, assume it is reachable due to the goto.
// This is slightly conservative, in that we don't
// check that the goto is reachable, so
// L: goto L
// will not provoke a warning.
// But it's good enough.
if x, isLabel := stmt.(*ast.LabeledStmt); isLabel && d.hasGoto[x.Label.Name] {
d.reachable = true
}
if !d.reachable {
switch stmt.(type) {
case *ast.EmptyStmt:
// do not warn about unreachable empty statements
default:
d.f.Bad(stmt.Pos(), "unreachable code")
d.reachable = true // silence error about next statement
}
}
switch x := stmt.(type) {
default:
d.f.Warnf(x.Pos(), "internal error in findDead: unexpected statement %T", x)
case *ast.AssignStmt,
*ast.BadStmt,
*ast.DeclStmt,
*ast.DeferStmt,
*ast.EmptyStmt,
*ast.GoStmt,
*ast.IncDecStmt,
*ast.SendStmt:
// no control flow
case *ast.BlockStmt:
for _, stmt := range x.List {
d.findDead(stmt)
}
case *ast.BranchStmt:
switch x.Tok {
case token.BREAK, token.GOTO, token.FALLTHROUGH:
d.reachable = false
case token.CONTINUE:
// NOTE: We accept "continue" statements as terminating.
// They are not necessary in the spec definition of terminating,
// because a continue statement cannot be the final statement
// before a return. But for the more general problem of syntactically
// identifying dead code, continue redirects control flow just
// like the other terminating statements.
d.reachable = false
}
case *ast.ExprStmt:
// Call to panic?
call, ok := x.X.(*ast.CallExpr)
if ok {
// name.Obj == nil means the identifier resolves to the built-in panic,
// not a locally declared function of the same name.
name, ok := call.Fun.(*ast.Ident)
if ok && name.Name == "panic" && name.Obj == nil {
d.reachable = false
}
}
case *ast.ForStmt:
d.findDead(x.Body)
// A 'for {}' with no condition only exits via break.
d.reachable = x.Cond != nil || d.hasBreak[x]
case *ast.IfStmt:
d.findDead(x.Body)
if x.Else != nil {
r := d.reachable
d.reachable = true
d.findDead(x.Else)
// Code after if/else is reachable if either branch falls through.
d.reachable = d.reachable || r
} else {
// might not have executed if statement
d.reachable = true
}
case *ast.LabeledStmt:
d.findDead(x.Stmt)
case *ast.RangeStmt:
d.findDead(x.Body)
d.reachable = true
case *ast.ReturnStmt:
d.reachable = false
case *ast.SelectStmt:
// NOTE: Unlike switch and type switch below, we don't care
// whether a select has a default, because a select without a
// default blocks until one of the cases can run. That's different
// from a switch without a default, which behaves like it has
// a default with an empty body.
anyReachable := false
for _, comm := range x.Body.List {
d.reachable = true
for _, stmt := range comm.(*ast.CommClause).Body {
d.findDead(stmt)
}
anyReachable = anyReachable || d.reachable
}
d.reachable = anyReachable || d.hasBreak[x]
case *ast.SwitchStmt:
anyReachable := false
hasDefault := false
for _, cas := range x.Body.List {
cc := cas.(*ast.CaseClause)
if cc.List == nil {
hasDefault = true
}
d.reachable = true
for _, stmt := range cc.Body {
d.findDead(stmt)
}
anyReachable = anyReachable || d.reachable
}
d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
case *ast.TypeSwitchStmt:
anyReachable := false
hasDefault := false
for _, cas := range x.Body.List {
cc := cas.(*ast.CaseClause)
if cc.List == nil {
hasDefault = true
}
d.reachable = true
for _, stmt := range cc.Body {
d.findDead(stmt)
}
anyReachable = anyReachable || d.reachable
}
d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
}
}

205
tools/vendor/github.com/dnephin/govet/doc.go generated vendored Normal file
View File

@@ -0,0 +1,205 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
Vet examines Go source code and reports suspicious constructs, such as Printf
calls whose arguments do not align with the format string. Vet uses heuristics
that do not guarantee all reports are genuine problems, but it can find errors
not caught by the compilers.

It can be invoked three ways:

By package, from the go tool:
	go vet package/path/name
vets the package whose path is provided.

By files:
	go tool vet source/directory/*.go
vets the files named, all of which must be in the same package.

By directory:
	go tool vet source/directory
recursively descends the directory, vetting each package it finds.

Vet's exit code is 2 for erroneous invocation of the tool, 1 if a
problem was reported, and 0 otherwise. Note that the tool does not
check every possible problem and depends on unreliable heuristics
so it should be used as guidance only, not as a firm indicator of
program correctness.

By default the -all flag is set so all checks are performed.
If any flags are explicitly set to true, only those tests are run. Conversely, if
any flag is explicitly set to false, only those tests are disabled. Thus -printf=true
runs the printf check, -printf=false runs all checks except the printf check.

Available checks:

Assembly declarations

Flag: -asmdecl

Mismatches between assembly files and Go function declarations.

Useless assignments

Flag: -assign

Check for useless assignments.

Atomic mistakes

Flag: -atomic

Common mistaken usages of the sync/atomic package.

Boolean conditions

Flag: -bool

Mistakes involving boolean operators.

Build tags

Flag: -buildtags

Badly formed or misplaced +build tags.

Invalid uses of cgo

Flag: -cgocall

Detect some violations of the cgo pointer passing rules.

Unkeyed composite literals

Flag: -composites

Composite struct literals that do not use the field-keyed syntax.

Copying locks

Flag: -copylocks

Locks that are erroneously passed by value.

Tests, benchmarks and documentation examples

Flag: -tests

Mistakes involving tests including functions with incorrect names or signatures
and example tests that document identifiers not in the package.

Failure to call the cancelation function returned by context.WithCancel.

Flag: -lostcancel

The cancelation function returned by context.WithCancel, WithTimeout,
and WithDeadline must be called or the new context will remain live
until its parent context is cancelled.
(The background context is never cancelled.)

Methods

Flag: -methods

Non-standard signatures for methods with familiar names, including:
	Format GobEncode GobDecode MarshalJSON MarshalXML
	Peek ReadByte ReadFrom ReadRune Scan Seek
	UnmarshalJSON UnreadByte UnreadRune WriteByte
	WriteTo

Nil function comparison

Flag: -nilfunc

Comparisons between functions and nil.

Printf family

Flag: -printf

Suspicious calls to functions in the Printf family, including any functions
with these names, disregarding case:
	Print Printf Println
	Fprint Fprintf Fprintln
	Sprint Sprintf Sprintln
	Error Errorf
	Fatal Fatalf
	Log Logf
	Panic Panicf Panicln
The -printfuncs flag can be used to redefine this list.
If the function name ends with an 'f', the function is assumed to take
a format descriptor string in the manner of fmt.Printf. If not, vet
complains about arguments that look like format descriptor strings.

It also checks for errors such as using a Writer as the first argument of
Printf.

Range loop variables

Flag: -rangeloops

Incorrect uses of range loop variables in closures.

Shadowed variables

Flag: -shadow=false (experimental; must be set explicitly)

Variables that may have been unintentionally shadowed.

Shifts

Flag: -shift

Shifts equal to or longer than the variable's length.

Struct tags

Flag: -structtags

Struct tags that do not follow the format understood by reflect.StructTag.Get.
Well-known encoding struct tags (json, xml) used with unexported fields.

Unreachable code

Flag: -unreachable

Unreachable code.

Misuse of unsafe Pointers

Flag: -unsafeptr

Likely incorrect uses of unsafe.Pointer to convert integers to pointers.
A conversion from uintptr to unsafe.Pointer is invalid if it implies that
there is a uintptr-typed word in memory that holds a pointer value,
because that word will be invisible to stack copying and to the garbage
collector.

Unused result of certain function calls

Flag: -unusedresult

Calls to well-known functions and methods that return a value that is
discarded. By default, this includes functions like fmt.Errorf and
fmt.Sprintf and methods like String and Error. The flags -unusedfuncs
and -unusedstringmethods control the set.

Other flags

These flags configure the behavior of vet:

	-all (default true)
		Enable all non-experimental checks.
	-v
		Verbose mode
	-printfuncs
		A comma-separated list of print-like function names
		to supplement the standard list.
		For more information, see the discussion of the -printf flag.
	-shadowstrict
		Whether to be strict about shadowing; can be noisy.
*/
package main

View File

@@ -0,0 +1,512 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cfg
// This file implements the CFG construction pass.
import (
"fmt"
"go/ast"
"go/token"
)
// builder holds the transient state used while constructing the CFG
// of a single function body.
type builder struct {
	cfg       *CFG                     // the graph under construction
	mayReturn func(*ast.CallExpr) bool // client predicate: may this call return?
	current   *Block                   // block to which new nodes are appended
	lblocks   map[*ast.Object]*lblock  // labeled blocks
	targets   *targets                 // linked stack of branch targets
}
// stmt emits blocks and edges for a single statement, appending its
// non-control nodes to b.current and leaving b.current at the block
// where control continues after the statement.
func (b *builder) stmt(_s ast.Stmt) {
	// The label of the current statement. If non-nil, its _goto
	// target is always set; its _break and _continue are set only
	// within the body of switch/typeswitch/select/for/range.
	// It is effectively an additional default-nil parameter of stmt().
	var label *lblock
start:
	switch s := _s.(type) {
	case *ast.BadStmt,
		*ast.SendStmt,
		*ast.IncDecStmt,
		*ast.GoStmt,
		*ast.DeferStmt,
		*ast.EmptyStmt,
		*ast.AssignStmt:
		// No effect on control flow.
		b.add(s)

	case *ast.ExprStmt:
		b.add(s)
		if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) {
			// Calls to panic, os.Exit, etc, never return.
			b.current = b.newUnreachableBlock("unreachable.call")
		}

	case *ast.DeclStmt:
		// Treat each var ValueSpec as a separate statement.
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if spec, ok := spec.(*ast.ValueSpec); ok {
					b.add(spec)
				}
			}
		}

	case *ast.LabeledStmt:
		label = b.labeledBlock(s.Label)
		b.jump(label._goto)
		b.current = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(s.Stmt) with label set

	case *ast.ReturnStmt:
		b.add(s)
		b.current = b.newUnreachableBlock("unreachable.return")

	case *ast.BranchStmt:
		// Resolve the branch to its destination block: the named
		// label's target, or the innermost enclosing target of the
		// matching kind.
		var block *Block
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				if lb := b.labeledBlock(s.Label); lb != nil {
					block = lb._break
				}
			} else {
				for t := b.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				if lb := b.labeledBlock(s.Label); lb != nil {
					block = lb._continue
				}
			} else {
				for t := b.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := b.targets; t != nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			if s.Label != nil {
				block = b.labeledBlock(s.Label)._goto
			}
		}
		if block == nil {
			// No matching target was found (ill-formed input).
			block = b.newBlock("undefined.branch")
		}
		b.jump(block)
		b.current = b.newUnreachableBlock("unreachable.branch")

	case *ast.BlockStmt:
		b.stmtList(s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(s.Init)
		}
		then := b.newBlock("if.then")
		done := b.newBlock("if.done")
		_else := done
		if s.Else != nil {
			_else = b.newBlock("if.else")
		}
		b.add(s.Cond)
		b.ifelse(then, _else)
		b.current = then
		b.stmt(s.Body)
		b.jump(done)

		if s.Else != nil {
			b.current = _else
			b.stmt(s.Else)
			b.jump(done)
		}

		b.current = done

	case *ast.SwitchStmt:
		b.switchStmt(s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(s, label)

	case *ast.SelectStmt:
		b.selectStmt(s, label)

	case *ast.ForStmt:
		b.forStmt(s, label)

	case *ast.RangeStmt:
		b.rangeStmt(s, label)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}
// stmtList emits CFG entries for each statement in list, in order.
func (b *builder) stmtList(list []ast.Stmt) {
	for i := range list {
		b.stmt(list[i])
	}
}
// switchStmt emits the CFG for an expression switch statement; label,
// if non-nil, is the enclosing labeled statement, whose _break is set
// to this switch's done block.
func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) {
	if s.Init != nil {
		b.stmt(s.Init)
	}
	if s.Tag != nil {
		b.add(s.Tag)
	}
	done := b.newBlock("switch.done")
	if label != nil {
		label._break = done
	}
	// We pull the default case (if present) down to the end.
	// But each fallthrough label must point to the next
	// body block in source order, so we preallocate a
	// body block (fallthru) for the next case.
	// Unfortunately this makes for a confusing block order.
	var defaultBody *[]ast.Stmt
	var defaultFallthrough *Block
	var fallthru, defaultBlock *Block
	ncases := len(s.Body.List)
	for i, clause := range s.Body.List {
		body := fallthru
		if body == nil {
			body = b.newBlock("switch.body") // first case only
		}

		// Preallocate body block for the next case.
		fallthru = done
		if i+1 < ncases {
			fallthru = b.newBlock("switch.body")
		}

		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			// Default case.
			defaultBody = &cc.Body
			defaultFallthrough = fallthru
			defaultBlock = body
			continue
		}

		// One conditional block per case expression, chained.
		var nextCond *Block
		for _, cond := range cc.List {
			nextCond = b.newBlock("switch.next")
			b.add(cond) // one half of the tag==cond condition
			b.ifelse(body, nextCond)
			b.current = nextCond
		}
		b.current = body
		b.targets = &targets{
			tail:         b.targets,
			_break:       done,
			_fallthrough: fallthru,
		}
		b.stmtList(cc.Body)
		b.targets = b.targets.tail
		b.jump(done)
		b.current = nextCond
	}
	if defaultBlock != nil {
		// Emit the deferred default case last.
		b.jump(defaultBlock)
		b.current = defaultBlock
		b.targets = &targets{
			tail:         b.targets,
			_break:       done,
			_fallthrough: defaultFallthrough,
		}
		b.stmtList(*defaultBody)
		b.targets = b.targets.tail
	}
	b.jump(done)
	b.current = done
}
// typeSwitchStmt emits the CFG for a type switch statement; label, if
// non-nil, is the enclosing labeled statement, whose _break is set to
// this switch's done block.
func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) {
	if s.Init != nil {
		b.stmt(s.Init)
	}
	if s.Assign != nil {
		b.add(s.Assign)
	}

	done := b.newBlock("typeswitch.done")
	if label != nil {
		label._break = done
	}
	// The default clause (if any) is emitted after all other clauses.
	var default_ *ast.CaseClause
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			default_ = cc
			continue
		}
		body := b.newBlock("typeswitch.body")
		var next *Block
		for _, casetype := range cc.List {
			next = b.newBlock("typeswitch.next")
			// casetype is a type, so don't call b.add(casetype).
			// This block logically contains a type assertion,
			// x.(casetype), but it's unclear how to represent x.
			_ = casetype
			b.ifelse(body, next)
			b.current = next
		}
		b.current = body
		b.typeCaseBody(cc, done)
		b.current = next
	}
	if default_ != nil {
		b.typeCaseBody(default_, done)
	} else {
		b.jump(done)
	}
	b.current = done
}
// typeCaseBody emits one type-switch case clause body, with unlabeled
// break resolving to done, then jumps to done.
func (b *builder) typeCaseBody(cc *ast.CaseClause, done *Block) {
	saved := b.targets
	b.targets = &targets{tail: saved, _break: done}
	b.stmtList(cc.Body)
	b.targets = saved
	b.jump(done)
}
// selectStmt emits the CFG for a select statement; label, if non-nil,
// is the enclosing labeled statement, whose _break is set to this
// select's done block.
func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) {
	// First evaluate channel expressions.
	// TODO(adonovan): fix: evaluate only channel exprs here.
	for _, clause := range s.Body.List {
		if comm := clause.(*ast.CommClause).Comm; comm != nil {
			b.stmt(comm)
		}
	}

	done := b.newBlock("select.done")
	if label != nil {
		label._break = done
	}

	// The default clause (if any) is emitted after all comm clauses.
	var defaultBody *[]ast.Stmt
	for _, cc := range s.Body.List {
		clause := cc.(*ast.CommClause)
		if clause.Comm == nil {
			defaultBody = &clause.Body
			continue
		}
		body := b.newBlock("select.body")
		next := b.newBlock("select.next")
		b.ifelse(body, next)
		b.current = body
		b.targets = &targets{
			tail:   b.targets,
			_break: done,
		}
		switch comm := clause.Comm.(type) {
		case *ast.ExprStmt: // <-ch
			// nop
		case *ast.AssignStmt: // x := <-states[state].Chan
			b.add(comm.Lhs[0])
		}
		b.stmtList(clause.Body)
		b.targets = b.targets.tail
		b.jump(done)
		b.current = next
	}
	if defaultBody != nil {
		b.targets = &targets{
			tail:   b.targets,
			_break: done,
		}
		b.stmtList(*defaultBody)
		b.targets = b.targets.tail
		b.jump(done)
	}
	b.current = done
}
// forStmt emits the CFG for a for statement; label, if non-nil, is the
// enclosing labeled statement, whose _break and _continue are set to
// this loop's done and continue blocks.
func (b *builder) forStmt(s *ast.ForStmt, label *lblock) {
	//	...init...
	//	jump loop
	// loop:
	//	if cond goto body else done
	// body:
	//	...body...
	//	jump post
	// post:	(target of continue)
	//	...post...
	//	jump loop
	// done:	(target of break)
	if s.Init != nil {
		b.stmt(s.Init)
	}
	body := b.newBlock("for.body")
	done := b.newBlock("for.done") // target of 'break'
	loop := body                   // target of back-edge (shared with body if no cond)
	if s.Cond != nil {
		loop = b.newBlock("for.loop")
	}
	cont := loop // target of 'continue' (shared with loop if no post)
	if s.Post != nil {
		cont = b.newBlock("for.post")
	}
	if label != nil {
		label._break = done
		label._continue = cont
	}
	b.jump(loop)
	b.current = loop
	if loop != body {
		b.add(s.Cond)
		b.ifelse(body, done)
		b.current = body
	}
	b.targets = &targets{
		tail:      b.targets,
		_break:    done,
		_continue: cont,
	}
	b.stmt(s.Body)
	b.targets = b.targets.tail
	b.jump(cont)

	if s.Post != nil {
		b.current = cont
		b.stmt(s.Post)
		b.jump(loop) // back-edge
	}
	b.current = done
}
// rangeStmt emits the CFG for a range statement; label, if non-nil, is
// the enclosing labeled statement, whose _break and _continue are set
// to this loop's done and loop blocks.
func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) {
	b.add(s.X)

	if s.Key != nil {
		b.add(s.Key)
	}
	if s.Value != nil {
		b.add(s.Value)
	}

	//	...
	// loop:	(target of continue)
	//	if ... goto body else done
	// body:
	//	...
	//	jump loop
	// done:	(target of break)

	loop := b.newBlock("range.loop")
	b.jump(loop)
	b.current = loop

	body := b.newBlock("range.body")
	done := b.newBlock("range.done")
	b.ifelse(body, done)
	b.current = body

	if label != nil {
		label._break = done
		label._continue = loop
	}
	b.targets = &targets{
		tail:      b.targets,
		_break:    done,
		_continue: loop,
	}
	b.stmt(s.Body)
	b.targets = b.targets.tail
	b.jump(loop) // back-edge
	b.current = done
}
// -------- helpers --------

// Destinations associated with unlabeled for/switch/select stmts.
// We push/pop one of these as we enter/leave each construct and for
// each BranchStmt we scan for the innermost target of the right type.
//
type targets struct {
	tail         *targets // rest of stack
	_break       *Block   // destination of an unlabeled break, if any
	_continue    *Block   // destination of an unlabeled continue, if any
	_fallthrough *Block   // destination of a fallthrough, if any
}

// Destinations associated with a labeled block.
// We populate these as labels are encountered in forward gotos or
// labeled statements.
//
type lblock struct {
	_goto     *Block // target of a goto to this label; always set
	_break    *Block // target of a labeled break, if applicable
	_continue *Block // target of a labeled continue, if applicable
}
// labeledBlock returns the branch target associated with the
// specified label, creating it (and the lblocks map itself) on
// first use.
//
func (b *builder) labeledBlock(label *ast.Ident) *lblock {
	if lb := b.lblocks[label.Obj]; lb != nil {
		return lb
	}
	lb := &lblock{_goto: b.newBlock(label.Name)}
	if b.lblocks == nil {
		b.lblocks = make(map[*ast.Object]*lblock)
	}
	b.lblocks[label.Obj] = lb
	return lb
}
// newBlock appends a new unconnected basic block to b.cfg's block
// slice and returns it.
// It does not automatically become the current block.
// comment is an optional string for more readable debugging output.
func (b *builder) newBlock(comment string) *Block {
	blk := &Block{
		index:   int32(len(b.cfg.Blocks)),
		comment: comment,
	}
	// Start Succs on the inline backing array to avoid allocation.
	blk.Succs = blk.succs2[:0]
	b.cfg.Blocks = append(b.cfg.Blocks, blk)
	return blk
}
// newUnreachableBlock is newBlock, but the result is marked as
// statically unreachable.
func (b *builder) newUnreachableBlock(comment string) *Block {
	blk := b.newBlock(comment)
	blk.unreachable = true
	return blk
}

// add appends node n to the current block.
func (b *builder) add(n ast.Node) {
	cur := b.current
	cur.Nodes = append(cur.Nodes, n)
}

// jump adds an edge from the current block to the target block,
// and sets b.current to nil.
func (b *builder) jump(target *Block) {
	cur := b.current
	cur.Succs = append(cur.Succs, target)
	b.current = nil
}

// ifelse emits edges from the current block to the t and f blocks,
// and sets b.current to nil.
func (b *builder) ifelse(t, f *Block) {
	cur := b.current
	cur.Succs = append(cur.Succs, t, f)
	b.current = nil
}

View File

@@ -0,0 +1,142 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This package constructs a simple control-flow graph (CFG) of the
// statements and expressions within a single function.
//
// Use cfg.New to construct the CFG for a function body.
//
// The blocks of the CFG contain all the function's non-control
// statements. The CFG does not contain control statements such as If,
// Switch, Select, and Branch, but does contain their subexpressions.
// For example, this source code:
//
// if x := f(); x != nil {
// T()
// } else {
// F()
// }
//
// produces this CFG:
//
// 1: x := f()
// x != nil
// succs: 2, 3
// 2: T()
// succs: 4
// 3: F()
// succs: 4
// 4:
//
// The CFG does contain Return statements; even implicit returns are
// materialized (at the position of the function's closing brace).
//
// The CFG does not record conditions associated with conditional branch
// edges, nor the short-circuit semantics of the && and || operators,
// nor abnormal control flow caused by panic. If you need this
// information, use golang.org/x/tools/go/ssa instead.
//
package cfg
// Although the vet tool has type information, it is often extremely
// fragmentary, so for simplicity this package does not depend on
// go/types. Consequently control-flow conditions are ignored even
// when constant, and "mayReturn" information must be provided by the
// client.
import (
"bytes"
"fmt"
"go/ast"
"go/format"
"go/token"
)
// A CFG represents the control-flow graph of a single function.
//
// The entry point is Blocks[0]; there may be multiple return blocks.
type CFG struct {
	Blocks []*Block // block[0] is entry; order otherwise undefined
}

// A Block represents a basic block: a list of statements and
// expressions that are always evaluated sequentially.
//
// A block may have 0-2 successors: zero for a return block or a block
// that calls a function such as panic that never returns; one for a
// normal (jump) block; and two for a conditional (if) block.
type Block struct {
	Nodes []ast.Node // statements, expressions, and ValueSpecs
	Succs []*Block   // successor nodes in the graph

	comment     string    // for debugging
	index       int32     // index within CFG.Blocks
	unreachable bool      // is block of stmts following return/panic/for{}
	succs2      [2]*Block // underlying array for Succs
}
// New returns a new control-flow graph for the specified function body,
// which must be non-nil.
//
// The CFG builder calls mayReturn to determine whether a given function
// call may return. For example, calls to panic, os.Exit, and log.Fatal
// do not return, so the builder can remove infeasible graph edges
// following such calls. The builder calls mayReturn only for a
// CallExpr beneath an ExprStmt.
func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG {
	bld := builder{cfg: new(CFG), mayReturn: mayReturn}
	bld.current = bld.newBlock("entry")
	bld.stmt(body)

	// If control can fall off the end of the function's body,
	// materialize the implicit return at the closing brace.
	if cur := bld.current; cur != nil && !cur.unreachable {
		bld.add(&ast.ReturnStmt{
			Return: body.End() - 1,
		})
	}
	return bld.cfg
}
// String returns a short debugging description of the block.
func (b *Block) String() string {
	return fmt.Sprintf("block %d (%s)", b.index, b.comment)
}

// Return returns the return statement at the end of this block if present, nil otherwise.
func (b *Block) Return() (ret *ast.ReturnStmt) {
	if n := len(b.Nodes); n > 0 {
		ret, _ = b.Nodes[n-1].(*ast.ReturnStmt)
	}
	return
}
// Format formats the control-flow graph for ease of debugging.
func (g *CFG) Format(fset *token.FileSet) string {
	var out bytes.Buffer
	for _, blk := range g.Blocks {
		fmt.Fprintf(&out, ".%d: # %s\n", blk.index, blk.comment)
		for _, node := range blk.Nodes {
			fmt.Fprintf(&out, "\t%s\n", formatNode(fset, node))
		}
		if len(blk.Succs) > 0 {
			out.WriteString("\tsuccs:")
			for _, succ := range blk.Succs {
				fmt.Fprintf(&out, " %d", succ.index)
			}
			out.WriteByte('\n')
		}
		out.WriteByte('\n')
	}
	return out.String()
}
func formatNode(fset *token.FileSet, n ast.Node) string {
var buf bytes.Buffer
format.Node(&buf, fset, n)
// Indent secondary lines by a tab.
return string(bytes.Replace(buf.Bytes(), []byte("\n"), []byte("\n\t"), -1))
}

View File

@@ -0,0 +1,28 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package whitelist defines exceptions for the vet tool.
package whitelist
// UnkeyedLiteral is a white list of types in the standard packages
// that are used with unkeyed literals we deem to be acceptable.
var UnkeyedLiteral = map[string]bool{
	// These image and image/color struct types are frozen. We will never add fields to them.
	"image.Point":         true,
	"image.Rectangle":     true,
	"image.Uniform":       true,
	"image/color.Alpha":   true,
	"image/color.Alpha16": true,
	"image/color.CMYK":    true,
	"image/color.Gray":    true,
	"image/color.Gray16":  true,
	"image/color.NRGBA":   true,
	"image/color.NRGBA64": true,
	"image/color.NYCbCrA": true,
	"image/color.RGBA":    true,
	"image/color.RGBA64":  true,
	"image/color.YCbCr":   true,

	"unicode.Range16": true,
}

318
tools/vendor/github.com/dnephin/govet/lostcancel.go generated vendored Normal file
View File

@@ -0,0 +1,318 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"github.com/dnephin/govet/internal/cfg"
"fmt"
"go/ast"
"go/types"
"strconv"
)
// init registers the lostcancel check with the vet driver, to be run
// over function declarations and function literals.
func init() {
	register("lostcancel",
		"check for failure to call cancelation function returned by context.WithCancel",
		checkLostCancel,
		funcDecl, funcLit)
}

// debugLostCancel enables printing of each analyzed function's CFG.
const debugLostCancel = false

// contextPackage is the import path of the package whose
// With{Cancel,Timeout,Deadline} functions return cancel functions.
var contextPackage = "context"
// checkLostCancel reports a failure to call the cancel function
// returned by context.WithCancel, either because the variable was
// assigned to the blank identifier, or because there exists a
// control-flow path from the call to a return statement and that path
// does not "use" the cancel function. Any reference to the variable
// counts as a use, even within a nested function literal.
//
// checkLostCancel analyzes a single named or literal function.
func checkLostCancel(f *File, node ast.Node) {
	// Fast path: bypass check if file doesn't use context.WithCancel.
	if !hasImport(f.file, contextPackage) {
		return
	}

	// Maps each cancel variable to its defining ValueSpec/AssignStmt.
	cancelvars := make(map[*types.Var]ast.Node)

	// Find the set of cancel vars to analyze.
	// stack tracks the ancestors of the node being visited.
	stack := make([]ast.Node, 0, 32)
	ast.Inspect(node, func(n ast.Node) bool {
		switch n.(type) {
		case *ast.FuncLit:
			if len(stack) > 0 {
				return false // don't stray into nested functions
			}
		case nil:
			stack = stack[:len(stack)-1] // pop
			return true
		}
		stack = append(stack, n) // push

		// Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]:
		//
		//   ctx, cancel    := context.WithCancel(...)
		//   ctx, cancel     = context.WithCancel(...)
		//   var ctx, cancel = context.WithCancel(...)
		//
		if isContextWithCancel(f, n) && isCall(stack[len(stack)-2]) {
			var id *ast.Ident // id of cancel var
			stmt := stack[len(stack)-3]
			switch stmt := stmt.(type) {
			case *ast.ValueSpec:
				if len(stmt.Names) > 1 {
					id = stmt.Names[1]
				}
			case *ast.AssignStmt:
				if len(stmt.Lhs) > 1 {
					id, _ = stmt.Lhs[1].(*ast.Ident)
				}
			}
			if id != nil {
				if id.Name == "_" {
					// Assigning the cancel function to blank discards it.
					f.Badf(id.Pos(), "the cancel function returned by context.%s should be called, not discarded, to avoid a context leak",
						n.(*ast.SelectorExpr).Sel.Name)
				} else if v, ok := f.pkg.uses[id].(*types.Var); ok {
					cancelvars[v] = stmt
				} else if v, ok := f.pkg.defs[id].(*types.Var); ok {
					cancelvars[v] = stmt
				}
			}
		}

		return true
	})

	if len(cancelvars) == 0 {
		return // no need to build CFG
	}

	// Tell the CFG builder which functions never return.
	info := &types.Info{Uses: f.pkg.uses, Selections: f.pkg.selectors}
	mayReturn := func(call *ast.CallExpr) bool {
		name := callName(info, call)
		return !noReturnFuncs[name]
	}

	// Build the CFG.
	var g *cfg.CFG
	var sig *types.Signature
	switch node := node.(type) {
	case *ast.FuncDecl:
		sig, _ = f.pkg.defs[node.Name].Type().(*types.Signature)
		g = cfg.New(node.Body, mayReturn)
	case *ast.FuncLit:
		sig, _ = f.pkg.types[node.Type].Type.(*types.Signature)
		g = cfg.New(node.Body, mayReturn)
	}

	// Print CFG.
	if debugLostCancel {
		fmt.Println(g.Format(f.fset))
	}

	// Examine the CFG for each variable in turn.
	// (It would be more efficient to analyze all cancelvars in a
	// single pass over the AST, but seldom is there more than one.)
	for v, stmt := range cancelvars {
		if ret := lostCancelPath(f, g, v, stmt, sig); ret != nil {
			lineno := f.fset.Position(stmt.Pos()).Line
			f.Badf(stmt.Pos(), "the %s function is not used on all paths (possible context leak)", v.Name())
			f.Badf(ret.Pos(), "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
		}
	}
}
func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok }
func hasImport(f *ast.File, path string) bool {
for _, imp := range f.Imports {
v, _ := strconv.Unquote(imp.Path.Value)
if v == path {
return true
}
}
return false
}
// isContextWithCancel reports whether n is one of the qualified identifiers
// context.With{Cancel,Timeout,Deadline}.
func isContextWithCancel(f *File, n ast.Node) bool {
	if sel, ok := n.(*ast.SelectorExpr); ok {
		switch sel.Sel.Name {
		case "WithCancel", "WithTimeout", "WithDeadline":
			if x, ok := sel.X.(*ast.Ident); ok {
				// Prefer the resolved import path when type info is available.
				if pkgname, ok := f.pkg.uses[x].(*types.PkgName); ok {
					return pkgname.Imported().Path() == contextPackage
				}
				// Import failed, so we can't check package path.
				// Just check the local package name (heuristic).
				return x.Name == "context"
			}
		}
	}
	return false
}
// lostCancelPath finds a path through the CFG, from stmt (which defines
// the 'cancel' variable v) to a return statement, that doesn't "use" v.
// If it finds one, it returns the return statement (which may be synthetic).
// sig is the function's type, if known.
func lostCancelPath(f *File, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt {
	// If v is a named result, a naked return implicitly uses it.
	vIsNamedResult := sig != nil && tupleContains(sig.Results(), v)

	// uses reports whether stmts contain a "use" of variable v.
	uses := func(f *File, v *types.Var, stmts []ast.Node) bool {
		found := false
		for _, stmt := range stmts {
			ast.Inspect(stmt, func(n ast.Node) bool {
				switch n := n.(type) {
				case *ast.Ident:
					if f.pkg.uses[n] == v {
						found = true
					}
				case *ast.ReturnStmt:
					// A naked return statement counts as a use
					// of the named result variables.
					if n.Results == nil && vIsNamedResult {
						found = true
					}
				}
				return !found // stop the walk as soon as a use is found
			})
		}
		return found
	}

	// blockUses computes "uses" for each block, caching the result.
	memo := make(map[*cfg.Block]bool)
	blockUses := func(f *File, v *types.Var, b *cfg.Block) bool {
		res, ok := memo[b]
		if !ok {
			res = uses(f, v, b.Nodes)
			memo[b] = res
		}
		return res
	}

	// Find the var's defining block in the CFG,
	// plus the rest of the statements of that block.
	var defblock *cfg.Block
	var rest []ast.Node
outer:
	for _, b := range g.Blocks {
		for i, n := range b.Nodes {
			if n == stmt {
				defblock = b
				rest = b.Nodes[i+1:]
				break outer
			}
		}
	}
	if defblock == nil {
		panic("internal error: can't find defining block for cancel var")
	}

	// Is v "used" in the remainder of its defining block?
	if uses(f, v, rest) {
		return nil
	}

	// Does the defining block return without using v?
	if ret := defblock.Return(); ret != nil {
		return ret
	}

	// Search the CFG depth-first for a path, from defblock to a
	// return block, in which v is never "used".
	seen := make(map[*cfg.Block]bool)
	var search func(blocks []*cfg.Block) *ast.ReturnStmt
	search = func(blocks []*cfg.Block) *ast.ReturnStmt {
		for _, b := range blocks {
			if !seen[b] {
				seen[b] = true

				// Prune the search if the block uses v.
				if blockUses(f, v, b) {
					continue
				}

				// Found path to return statement?
				if ret := b.Return(); ret != nil {
					if debugLostCancel {
						fmt.Printf("found path to return in block %s\n", b)
					}
					return ret // found
				}

				// Recur
				if ret := search(b.Succs); ret != nil {
					if debugLostCancel {
						fmt.Printf(" from block %s\n", b)
					}
					return ret
				}
			}
		}
		return nil
	}
	return search(defblock.Succs)
}
func tupleContains(tuple *types.Tuple, v *types.Var) bool {
for i := 0; i < tuple.Len(); i++ {
if tuple.At(i) == v {
return true
}
}
return false
}
// noReturnFuncs is the set of functions and methods known never to
// return, keyed by the canonical name computed by callName. Calls to
// these end a control-flow path.
var noReturnFuncs = map[string]bool{
	"(*testing.common).FailNow": true,
	"(*testing.common).Fatal":   true,
	"(*testing.common).Fatalf":  true,
	"(*testing.common).Skip":    true,
	"(*testing.common).SkipNow": true,
	"(*testing.common).Skipf":   true,
	"log.Fatal":                 true,
	"log.Fatalf":                true,
	"log.Fatalln":               true,
	"os.Exit":                   true,
	"panic":                     true,
	"runtime.Goexit":            true,
}
// callName returns the canonical name of the builtin, method, or
// function called by call, if known.
func callName(info *types.Info, call *ast.CallExpr) string {
switch fun := call.Fun.(type) {
case *ast.Ident:
// builtin, e.g. "panic"
if obj, ok := info.Uses[fun].(*types.Builtin); ok {
return obj.Name()
}
case *ast.SelectorExpr:
if sel, ok := info.Selections[fun]; ok && sel.Kind() == types.MethodVal {
// method call, e.g. "(*testing.common).Fatal"
meth := sel.Obj()
return fmt.Sprintf("(%s).%s",
meth.Type().(*types.Signature).Recv().Type(),
meth.Name())
}
if obj, ok := info.Uses[fun.Sel]; ok {
// qualified identifier, e.g. "os.Exit"
return fmt.Sprintf("%s.%s",
obj.Pkg().Path(),
obj.Name())
}
}
// function with no name, or defined in missing imported package
return ""
}

504
tools/vendor/github.com/dnephin/govet/main.go generated vendored Normal file
View File

@@ -0,0 +1,504 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Vet is a simple checker for static errors in Go source code.
// See doc.go for more information.
package main
import (
"bytes"
"flag"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/printer"
"go/token"
"go/types"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
)
// Command-line flags shared across the whole tool.
var (
	verbose   = flag.Bool("v", false, "verbose")
	tags      = flag.String("tags", "", "comma-separated list of build tags to apply when parsing")
	noRecurse = flag.Bool("no-recurse", false, "disable recursive directory walking")

	tagList = []string{} // exploded version of tags flag; set in main
)
// exitCode is the eventual os.Exit status; raised via setExit.
var exitCode = 0

// "-all" flag enables all non-experimental checks
var all = triStateFlag("all", unset, "enable all non-experimental checks")

// Flags to control which individual checks to perform.
var report = map[string]*triState{
	// Only unusual checks are written here.
	// Most checks that operate during the AST walk are added by register.
	"asmdecl":   triStateFlag("asmdecl", unset, "check assembly against Go declarations"),
	"buildtags": triStateFlag("buildtags", unset, "check that +build tags are valid"),
}

// experimental records the flags enabling experimental features. These must be
// requested explicitly; they are not enabled by -all.
var experimental = map[string]bool{}

// setTrueCount records how many flags are explicitly set to true.
var setTrueCount int

// dirsRun and filesRun indicate whether the vet is applied to directory or
// file targets. The distinction affects which checks are run.
var dirsRun, filesRun bool

// includesNonTest indicates whether the vet is applied to non-test targets.
// Certain checks are relevant only if they touch both test and non-test files.
var includesNonTest bool
// A triState is a boolean that knows whether it has been set to either true or false.
// It is used to identify if a flag appears; the standard boolean flag cannot
// distinguish missing from unset. It also satisfies flag.Value.
type triState int
const (
unset triState = iota
setTrue
setFalse
)
func triStateFlag(name string, value triState, usage string) *triState {
flag.Var(&value, name, usage)
return &value
}
// triState implements flag.Value, flag.Getter, and flag.boolFlag.
// They work like boolean flags: we can say vet -printf as well as vet -printf=true
func (ts *triState) Get() interface{} {
return *ts == setTrue
}
func (ts triState) isTrue() bool {
return ts == setTrue
}
func (ts *triState) Set(value string) error {
b, err := strconv.ParseBool(value)
if err != nil {
return err
}
if b {
*ts = setTrue
setTrueCount++
} else {
*ts = setFalse
}
return nil
}
// String renders the tri-state for flag printing. An unset flag reports
// "true" because -all will enable it by default.
func (ts *triState) String() string {
	if *ts == setFalse {
		return "false"
	}
	if *ts == unset || *ts == setTrue {
		// Unset defaults to enabled (via -all), so it prints as "true".
		return "true"
	}
	panic("not reached")
}
func (ts triState) IsBoolFlag() bool {
return true
}
// vet tells whether to report errors for the named check, a flag name.
// NOTE(review): report[name] is nil for a name that was never registered;
// isTrue has a value receiver, so that would dereference a nil *triState and
// panic. Callers must pass only registered check names — confirm before
// adding new call sites.
func vet(name string) bool {
	return report[name].isTrue()
}
// setExit sets the value for os.Exit when it is called, later. It
// remembers the highest value.
func setExit(err int) {
	// Keep the worst (largest) exit code seen across the whole run.
	if err > exitCode {
		exitCode = err
	}
}
var (
// Each of these vars has a corresponding case in (*File).Visit.
assignStmt *ast.AssignStmt
binaryExpr *ast.BinaryExpr
callExpr *ast.CallExpr
compositeLit *ast.CompositeLit
exprStmt *ast.ExprStmt
field *ast.Field
funcDecl *ast.FuncDecl
funcLit *ast.FuncLit
genDecl *ast.GenDecl
interfaceType *ast.InterfaceType
rangeStmt *ast.RangeStmt
returnStmt *ast.ReturnStmt
// checkers is a two-level map.
// The outer level is keyed by a nil pointer, one of the AST vars above.
// The inner level is keyed by checker name.
checkers = make(map[ast.Node]map[string]func(*File, ast.Node))
)
func register(name, usage string, fn func(*File, ast.Node), types ...ast.Node) {
report[name] = triStateFlag(name, unset, usage)
for _, typ := range types {
m := checkers[typ]
if m == nil {
m = make(map[string]func(*File, ast.Node))
checkers[typ] = m
}
m[name] = fn
}
}
// Usage is a replacement usage function for the flags package.
func Usage() {
fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
fmt.Fprintf(os.Stderr, "\tvet [flags] directory...\n")
fmt.Fprintf(os.Stderr, "\tvet [flags] files... # Must be a single package\n")
fmt.Fprintf(os.Stderr, "By default, -all is set and all non-experimental checks are run.\n")
fmt.Fprintf(os.Stderr, "For more information run\n")
fmt.Fprintf(os.Stderr, "\tgo doc cmd/vet\n\n")
fmt.Fprintf(os.Stderr, "Flags:\n")
flag.PrintDefaults()
os.Exit(2)
}
// File is a wrapper for the state of a file used in the parser.
// The parse tree walkers are all methods of this type.
type File struct {
pkg *Package
fset *token.FileSet
name string
content []byte
file *ast.File
b bytes.Buffer // for use by methods
// Parsed package "foo" when checking package "foo_test"
basePkg *Package
// The objects that are receivers of a "String() string" method.
// This is used by the recursiveStringer method in print.go.
stringers map[*ast.Object]bool
// Registered checkers to run.
checkers map[ast.Node][]func(*File, ast.Node)
}
func main() {
flag.Usage = Usage
flag.Parse()
// If any flag is set, we run only those checks requested.
// If all flag is set true or if no flags are set true, set all the non-experimental ones
// not explicitly set (in effect, set the "-all" flag).
if setTrueCount == 0 || *all == setTrue {
for name, setting := range report {
if *setting == unset && !experimental[name] {
*setting = setTrue
}
}
}
tagList = strings.Split(*tags, ",")
initPrintFlags()
initUnusedFlags()
if flag.NArg() == 0 {
Usage()
}
for _, name := range flag.Args() {
// Is it a directory?
fi, err := os.Stat(name)
if err != nil {
warnf("error walking tree: %s", err)
continue
}
if fi.IsDir() {
dirsRun = true
} else {
filesRun = true
if !strings.HasSuffix(name, "_test.go") {
includesNonTest = true
}
}
}
if dirsRun && filesRun {
Usage()
}
if dirsRun {
for _, name := range flag.Args() {
if *noRecurse {
doPackageDir(name)
} else {
walkDir(name)
}
}
os.Exit(exitCode)
}
if doPackage(".", flag.Args(), nil) == nil {
warnf("no files checked")
}
os.Exit(exitCode)
}
// prefixDirectory places the directory name on the beginning of each name in the list.
func prefixDirectory(directory string, names []string) {
if directory != "." {
for i, name := range names {
names[i] = filepath.Join(directory, name)
}
}
}
// doPackageDir analyzes the single package found in the directory, if there is one,
// plus a test package, if there is one.
func doPackageDir(directory string) {
context := build.Default
if len(context.BuildTags) != 0 {
warnf("build tags %s previously set", context.BuildTags)
}
context.BuildTags = append(tagList, context.BuildTags...)
pkg, err := context.ImportDir(directory, 0)
if err != nil {
// If it's just that there are no go source files, that's fine.
if _, nogo := err.(*build.NoGoError); nogo {
return
}
// Non-fatal: we are doing a recursive walk and there may be other directories.
warnf("cannot process directory %s: %s", directory, err)
return
}
var names []string
names = append(names, pkg.GoFiles...)
names = append(names, pkg.CgoFiles...)
names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
names = append(names, pkg.SFiles...)
prefixDirectory(directory, names)
basePkg := doPackage(directory, names, nil)
// Is there also a "foo_test" package? If so, do that one as well.
if len(pkg.XTestGoFiles) > 0 {
names = pkg.XTestGoFiles
prefixDirectory(directory, names)
doPackage(directory, names, basePkg)
}
}
type Package struct {
path string
defs map[*ast.Ident]types.Object
uses map[*ast.Ident]types.Object
selectors map[*ast.SelectorExpr]*types.Selection
types map[ast.Expr]types.TypeAndValue
spans map[types.Object]Span
files []*File
typesPkg *types.Package
}
// doPackage analyzes the single package constructed from the named files.
// It returns the parsed Package or nil if none of the files have been checked.
func doPackage(directory string, names []string, basePkg *Package) *Package {
var files []*File
var astFiles []*ast.File
fs := token.NewFileSet()
for _, name := range names {
data, err := ioutil.ReadFile(name)
if err != nil {
// Warn but continue to next package.
warnf("%s: %s", name, err)
return nil
}
checkBuildTag(name, data)
var parsedFile *ast.File
if strings.HasSuffix(name, ".go") {
parsedFile, err = parser.ParseFile(fs, name, data, 0)
if err != nil {
warnf("%s: %s", name, err)
return nil
}
astFiles = append(astFiles, parsedFile)
}
files = append(files, &File{fset: fs, content: data, name: name, file: parsedFile})
}
if len(astFiles) == 0 {
return nil
}
pkg := new(Package)
pkg.path = astFiles[0].Name.Name
pkg.files = files
// Type check the package.
err := pkg.check(fs, astFiles)
if err != nil && *verbose {
warnf("%s", err)
}
// Check.
chk := make(map[ast.Node][]func(*File, ast.Node))
for typ, set := range checkers {
for name, fn := range set {
if vet(name) {
chk[typ] = append(chk[typ], fn)
}
}
}
for _, file := range files {
file.pkg = pkg
file.basePkg = basePkg
file.checkers = chk
if file.file != nil {
file.walkFile(file.name, file.file)
}
}
asmCheck(pkg)
return pkg
}
func visit(path string, f os.FileInfo, err error) error {
if err != nil {
warnf("walk error: %s", err)
return err
}
// One package per directory. Ignore the files themselves.
if !f.IsDir() {
return nil
}
doPackageDir(path)
return nil
}
// hasFileWithSuffix reports whether any file in the package has a name
// ending in suffix (e.g. ".go" or ".s").
func (pkg *Package) hasFileWithSuffix(suffix string) bool {
	for i := range pkg.files {
		if strings.HasSuffix(pkg.files[i].name, suffix) {
			return true
		}
	}
	return false
}
// walkDir recursively walks the tree looking for Go packages.
func walkDir(root string) {
filepath.Walk(root, visit)
}
// errorf formats the error to standard error, adding program
// identification and a newline, and exits.
func errorf(format string, args ...interface{}) {
fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
os.Exit(2)
}
// warnf formats the error to standard error, adding program
// identification and a newline, but does not exit.
func warnf(format string, args ...interface{}) {
fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
setExit(1)
}
// Println is fmt.Println guarded by -v.
func Println(args ...interface{}) {
if !*verbose {
return
}
fmt.Println(args...)
}
// Printf is fmt.Printf guarded by -v.
func Printf(format string, args ...interface{}) {
if !*verbose {
return
}
fmt.Printf(format+"\n", args...)
}
// Bad reports an error and sets the exit code..
func (f *File) Bad(pos token.Pos, args ...interface{}) {
f.Warn(pos, args...)
setExit(1)
}
// Badf reports a formatted error and sets the exit code.
func (f *File) Badf(pos token.Pos, format string, args ...interface{}) {
f.Warnf(pos, format, args...)
setExit(1)
}
// loc returns a formatted "file:line" representation of the position,
// or "" for token.NoPos.
func (f *File) loc(pos token.Pos) string {
	if pos == token.NoPos {
		// No position information available.
		return ""
	}
	// Do not print columns. Because the pos often points to the start of an
	// expression instead of the inner part with the actual error, the
	// precision can mislead.
	posn := f.fset.Position(pos)
	return fmt.Sprintf("%s:%d", posn.Filename, posn.Line)
}
// Warn reports an error but does not set the exit code.
func (f *File) Warn(pos token.Pos, args ...interface{}) {
fmt.Fprintf(os.Stderr, "%s: %s", f.loc(pos), fmt.Sprintln(args...))
}
// Warnf reports a formatted error but does not set the exit code.
func (f *File) Warnf(pos token.Pos, format string, args ...interface{}) {
fmt.Fprintf(os.Stderr, "%s: %s\n", f.loc(pos), fmt.Sprintf(format, args...))
}
// walkFile walks the file's tree.
func (f *File) walkFile(name string, file *ast.File) {
Println("Checking file", name)
ast.Walk(f, file)
}
// Visit implements the ast.Visitor interface.
func (f *File) Visit(node ast.Node) ast.Visitor {
var key ast.Node
switch node.(type) {
case *ast.AssignStmt:
key = assignStmt
case *ast.BinaryExpr:
key = binaryExpr
case *ast.CallExpr:
key = callExpr
case *ast.CompositeLit:
key = compositeLit
case *ast.ExprStmt:
key = exprStmt
case *ast.Field:
key = field
case *ast.FuncDecl:
key = funcDecl
case *ast.FuncLit:
key = funcLit
case *ast.GenDecl:
key = genDecl
case *ast.InterfaceType:
key = interfaceType
case *ast.RangeStmt:
key = rangeStmt
case *ast.ReturnStmt:
key = returnStmt
}
for _, fn := range f.checkers[key] {
fn(f, node)
}
return f
}
// gofmt returns a string representation of the expression.
func (f *File) gofmt(x ast.Expr) string {
f.b.Reset()
printer.Fprint(&f.b, f.fset, x)
return f.b.String()
}

182
tools/vendor/github.com/dnephin/govet/method.go generated vendored Normal file
View File

@@ -0,0 +1,182 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the code to check canonical methods.
package main
import (
"fmt"
"go/ast"
"go/printer"
"strings"
)
func init() {
register("methods",
"check that canonically named methods are canonically defined",
checkCanonicalMethod,
funcDecl, interfaceType)
}
type MethodSig struct {
args []string
results []string
}
// canonicalMethods lists the input and output types for Go methods
// that are checked using dynamic interface checks. Because the
// checks are dynamic, such methods would not cause a compile error
// if they have the wrong signature: instead the dynamic check would
// fail, sometimes mysteriously. If a method is found with a name listed
// here but not the input/output types listed here, vet complains.
//
// A few of the canonical methods have very common names.
// For example, a type might implement a Scan method that
// has nothing to do with fmt.Scanner, but we still want to check
// the methods that are intended to implement fmt.Scanner.
// To do that, the arguments that have a = prefix are treated as
// signals that the canonical meaning is intended: if a Scan
// method doesn't have a fmt.ScanState as its first argument,
// we let it go. But if it does have a fmt.ScanState, then the
// rest has to match.
var canonicalMethods = map[string]MethodSig{
// "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict
"Format": {[]string{"=fmt.State", "rune"}, []string{}}, // fmt.Formatter
"GobDecode": {[]string{"[]byte"}, []string{"error"}}, // gob.GobDecoder
"GobEncode": {[]string{}, []string{"[]byte", "error"}}, // gob.GobEncoder
"MarshalJSON": {[]string{}, []string{"[]byte", "error"}}, // json.Marshaler
"MarshalXML": {[]string{"*xml.Encoder", "xml.StartElement"}, []string{"error"}}, // xml.Marshaler
"Peek": {[]string{"=int"}, []string{"[]byte", "error"}}, // image.reader (matching bufio.Reader)
"ReadByte": {[]string{}, []string{"byte", "error"}}, // io.ByteReader
"ReadFrom": {[]string{"=io.Reader"}, []string{"int64", "error"}}, // io.ReaderFrom
"ReadRune": {[]string{}, []string{"rune", "int", "error"}}, // io.RuneReader
"Scan": {[]string{"=fmt.ScanState", "rune"}, []string{"error"}}, // fmt.Scanner
"Seek": {[]string{"=int64", "int"}, []string{"int64", "error"}}, // io.Seeker
"UnmarshalJSON": {[]string{"[]byte"}, []string{"error"}}, // json.Unmarshaler
"UnmarshalXML": {[]string{"*xml.Decoder", "xml.StartElement"}, []string{"error"}}, // xml.Unmarshaler
"UnreadByte": {[]string{}, []string{"error"}},
"UnreadRune": {[]string{}, []string{"error"}},
"WriteByte": {[]string{"byte"}, []string{"error"}}, // jpeg.writer (matching bufio.Writer)
"WriteTo": {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo
}
func checkCanonicalMethod(f *File, node ast.Node) {
switch n := node.(type) {
case *ast.FuncDecl:
if n.Recv != nil {
canonicalMethod(f, n.Name, n.Type)
}
case *ast.InterfaceType:
for _, field := range n.Methods.List {
for _, id := range field.Names {
canonicalMethod(f, id, field.Type.(*ast.FuncType))
}
}
}
}
func canonicalMethod(f *File, id *ast.Ident, t *ast.FuncType) {
// Expected input/output.
expect, ok := canonicalMethods[id.Name]
if !ok {
return
}
// Actual input/output
args := typeFlatten(t.Params.List)
var results []ast.Expr
if t.Results != nil {
results = typeFlatten(t.Results.List)
}
// Do the =s (if any) all match?
if !f.matchParams(expect.args, args, "=") || !f.matchParams(expect.results, results, "=") {
return
}
// Everything must match.
if !f.matchParams(expect.args, args, "") || !f.matchParams(expect.results, results, "") {
expectFmt := id.Name + "(" + argjoin(expect.args) + ")"
if len(expect.results) == 1 {
expectFmt += " " + argjoin(expect.results)
} else if len(expect.results) > 1 {
expectFmt += " (" + argjoin(expect.results) + ")"
}
f.b.Reset()
if err := printer.Fprint(&f.b, f.fset, t); err != nil {
fmt.Fprintf(&f.b, "<%s>", err)
}
actual := f.b.String()
actual = strings.TrimPrefix(actual, "func")
actual = id.Name + actual
f.Badf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
}
}
func argjoin(x []string) string {
y := make([]string, len(x))
for i, s := range x {
if s[0] == '=' {
s = s[1:]
}
y[i] = s
}
return strings.Join(y, ", ")
}
// Turn parameter list into slice of types
// (in the ast, types are Exprs).
// Have to handle f(int, bool) and f(x, y, z int)
// so not a simple 1-to-1 conversion.
func typeFlatten(l []*ast.Field) []ast.Expr {
var t []ast.Expr
for _, f := range l {
if len(f.Names) == 0 {
t = append(t, f.Type)
continue
}
for _ = range f.Names {
t = append(t, f.Type)
}
}
return t
}
// Does each type in expect with the given prefix match the corresponding
// type in actual? With prefix "=" only the signature-trigger entries are
// compared; with the empty prefix every entry is compared and surplus
// actual arguments also fail.
func (f *File) matchParams(expect []string, actual []ast.Expr, prefix string) bool {
	for i, x := range expect {
		if !strings.HasPrefix(x, prefix) {
			// Not selected by this pass; skip.
			continue
		}
		if i >= len(actual) {
			// Fewer actual types than expected: no match.
			return false
		}
		if !f.matchParamType(x, actual[i]) {
			return false
		}
	}
	// In the exhaustive ("") pass, extra actual arguments mean no match.
	if prefix == "" && len(actual) > len(expect) {
		return false
	}
	return true
}
// Does this one type match? Compares the expected type string against the
// printed form of the actual AST expression.
func (f *File) matchParamType(expect string, actual ast.Expr) bool {
	// Strip the "=" signature-trigger marker, if present.
	if strings.HasPrefix(expect, "=") {
		expect = expect[1:]
	}
	// Strip package name if we're in that package.
	if n := len(f.file.Name.Name); len(expect) > n && expect[:n] == f.file.Name.Name && expect[n] == '.' {
		expect = expect[n+1:]
	}
	// Overkill but easy: print the actual expression and compare strings.
	f.b.Reset()
	printer.Fprint(&f.b, f.fset, actual)
	return f.b.String() == expect
}

67
tools/vendor/github.com/dnephin/govet/nilfunc.go generated vendored Normal file
View File

@@ -0,0 +1,67 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for useless function comparisons.
A useless comparison is one like f == nil as opposed to f() == nil.
*/
package main
import (
"go/ast"
"go/token"
"go/types"
)
func init() {
register("nilfunc",
"check for comparisons between functions and nil",
checkNilFuncComparison,
binaryExpr)
}
// checkNilFuncComparison reports comparisons like "f == nil" where f is a
// function value; such a comparison is constant and almost always a mistake
// for "f() == nil".
func checkNilFuncComparison(f *File, node ast.Node) {
	e := node.(*ast.BinaryExpr)
	// Only want == or != comparisons.
	if e.Op != token.EQL && e.Op != token.NEQ {
		return
	}
	// Only want comparisons with a nil identifier on one side.
	var e2 ast.Expr
	switch {
	case f.isNil(e.X):
		e2 = e.Y
	case f.isNil(e.Y):
		e2 = e.X
	default:
		return
	}
	// Only want identifiers or selector expressions.
	var obj types.Object
	switch v := e2.(type) {
	case *ast.Ident:
		obj = f.pkg.uses[v]
	case *ast.SelectorExpr:
		obj = f.pkg.uses[v.Sel]
	default:
		return
	}
	// Only want functions (not function-typed variables, which may
	// legitimately be nil).
	if _, ok := obj.(*types.Func); !ok {
		return
	}
	// "f != nil" is always true, "f == nil" always false.
	f.Badf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
}
// isNil reports whether the provided expression is the built-in nil
// identifier. (Only the predeclared nil has type UntypedNil in the
// type-checker's record.)
func (f *File) isNil(e ast.Expr) bool {
	return f.pkg.types[e].Type == types.Typ[types.UntypedNil]
}

650
tools/vendor/github.com/dnephin/govet/print.go generated vendored Normal file
View File

@@ -0,0 +1,650 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the printf-checker.
package main
import (
"bytes"
"flag"
"go/ast"
"go/constant"
"go/token"
"go/types"
"strconv"
"strings"
"unicode/utf8"
)
var printfuncs = flag.String("printfuncs", "", "comma-separated list of print function names to check")
func init() {
register("printf",
"check printf-like invocations",
checkFmtPrintfCall,
funcDecl, callExpr)
}
func initPrintFlags() {
if *printfuncs == "" {
return
}
for _, name := range strings.Split(*printfuncs, ",") {
if len(name) == 0 {
flag.Usage()
}
// Backwards compatibility: skip optional first argument
// index after the colon.
if colon := strings.LastIndex(name, ":"); colon > 0 {
name = name[:colon]
}
name = strings.ToLower(name)
if name[len(name)-1] == 'f' {
isFormattedPrint[name] = true
} else {
isPrint[name] = true
}
}
}
// isFormattedPrint records the formatted-print functions. Names are
// lower-cased so the lookup is case insensitive.
var isFormattedPrint = map[string]bool{
"errorf": true,
"fatalf": true,
"fprintf": true,
"logf": true,
"panicf": true,
"printf": true,
"sprintf": true,
}
// isPrint records the unformatted-print functions. Names are lower-cased
// so the lookup is case insensitive.
var isPrint = map[string]bool{
"error": true,
"fatal": true,
"fprint": true,
"fprintln": true,
"log": true,
"panic": true,
"panicln": true,
"print": true,
"println": true,
"sprint": true,
"sprintln": true,
}
// formatString returns the format string argument and its index within
// the given printf-like call expression.
//
// The last parameter before variadic arguments is assumed to be
// a format string.
//
// The first string literal or string constant is assumed to be a format string
// if the call's signature cannot be determined.
//
// If it cannot find any format string parameter, it returns ("", -1).
func formatString(f *File, call *ast.CallExpr) (string, int) {
	typ := f.pkg.types[call.Fun].Type
	if typ != nil {
		if sig, ok := typ.(*types.Signature); ok {
			if !sig.Variadic() {
				// Skip checking non-variadic functions
				return "", -1
			}
			// Last fixed parameter (the one before "...") is the format.
			idx := sig.Params().Len() - 2
			if idx < 0 {
				// Skip checking variadic functions without
				// fixed arguments.
				return "", -1
			}
			s, ok := stringLiteralArg(f, call, idx)
			if !ok {
				// The last argument before variadic args isn't a string
				return "", -1
			}
			return s, idx
		}
	}
	// Cannot determine call's signature. Fallback to scanning for the first
	// string argument in the call
	for idx := range call.Args {
		if s, ok := stringLiteralArg(f, call, idx); ok {
			return s, idx
		}
	}
	return "", -1
}
// stringLiteralArg returns call's string constant argument at the index idx.
//
// ("", false) is returned if call's argument at the index idx isn't a string
// literal (or any string constant known to the type checker).
func stringLiteralArg(f *File, call *ast.CallExpr, idx int) (string, bool) {
	if idx >= len(call.Args) {
		// Index beyond the argument list.
		return "", false
	}
	arg := call.Args[idx]
	// The type checker records constant values; accept any string constant,
	// not just a literal token.
	lit := f.pkg.types[arg].Value
	if lit != nil && lit.Kind() == constant.String {
		return constant.StringVal(lit), true
	}
	return "", false
}
// checkCall triggers the print-specific checks if the call invokes a print function.
func checkFmtPrintfCall(f *File, node ast.Node) {
if d, ok := node.(*ast.FuncDecl); ok && isStringer(f, d) {
// Remember we saw this.
if f.stringers == nil {
f.stringers = make(map[*ast.Object]bool)
}
if l := d.Recv.List; len(l) == 1 {
if n := l[0].Names; len(n) == 1 {
f.stringers[n[0].Obj] = true
}
}
return
}
call, ok := node.(*ast.CallExpr)
if !ok {
return
}
var Name string
switch x := call.Fun.(type) {
case *ast.Ident:
Name = x.Name
case *ast.SelectorExpr:
Name = x.Sel.Name
default:
return
}
name := strings.ToLower(Name)
if _, ok := isFormattedPrint[name]; ok {
f.checkPrintf(call, Name)
return
}
if _, ok := isPrint[name]; ok {
f.checkPrint(call, Name)
return
}
}
// isStringer returns true if the provided declaration is a "String() string"
// method, an implementation of fmt.Stringer: a method named String with a
// receiver, no parameters, and a single string result.
func isStringer(f *File, d *ast.FuncDecl) bool {
	return d.Recv != nil && d.Name.Name == "String" && d.Type.Results != nil &&
		len(d.Type.Params.List) == 0 && len(d.Type.Results.List) == 1 &&
		f.pkg.types[d.Type.Results.List[0].Type].Type == types.Typ[types.String]
}
// formatState holds the parsed representation of a printf directive such as "%3.*[4]d".
// It is constructed by parsePrintfVerb.
type formatState struct {
verb rune // the format verb: 'd' for "%d"
format string // the full format directive from % through verb, "%.3d".
name string // Printf, Sprintf etc.
flags []byte // the list of # + etc.
argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call
indexed bool // whether an indexing expression appears: %[1]d.
firstArg int // Index of first argument after the format in the Printf call.
// Used only during parse.
file *File
call *ast.CallExpr
argNum int // Which argument we're expecting to format now.
indexPending bool // Whether we have an indexed argument that has not resolved.
nbytes int // number of bytes of the format string consumed.
}
// checkPrintf checks a call to a formatted print routine such as Printf.
func (f *File) checkPrintf(call *ast.CallExpr, name string) {
format, idx := formatString(f, call)
if idx < 0 {
if *verbose {
f.Warn(call.Pos(), "can't check non-constant format in call to", name)
}
return
}
firstArg := idx + 1 // Arguments are immediately after format string.
if !strings.Contains(format, "%") {
if len(call.Args) > firstArg {
f.Badf(call.Pos(), "no formatting directive in %s call", name)
}
return
}
// Hard part: check formats against args.
argNum := firstArg
indexed := false
for i, w := 0, 0; i < len(format); i += w {
w = 1
if format[i] == '%' {
state := f.parsePrintfVerb(call, name, format[i:], firstArg, argNum)
if state == nil {
return
}
w = len(state.format)
if state.indexed {
indexed = true
}
if !f.okPrintfArg(call, state) { // One error per format is enough.
return
}
if len(state.argNums) > 0 {
// Continue with the next sequential argument.
argNum = state.argNums[len(state.argNums)-1] + 1
}
}
}
// Dotdotdot is hard.
if call.Ellipsis.IsValid() && argNum >= len(call.Args)-1 {
return
}
// If the arguments were direct indexed, we assume the programmer knows what's up.
// Otherwise, there should be no leftover arguments.
if !indexed && argNum != len(call.Args) {
expect := argNum - firstArg
numArgs := len(call.Args) - firstArg
f.Badf(call.Pos(), "wrong number of args for format in %s call: %d needed but %d args", name, expect, numArgs)
}
}
// parseFlags accepts any printf flags.
func (s *formatState) parseFlags() {
for s.nbytes < len(s.format) {
switch c := s.format[s.nbytes]; c {
case '#', '0', '+', '-', ' ':
s.flags = append(s.flags, c)
s.nbytes++
default:
return
}
}
}
// scanNum advances s.nbytes through a decimal number if present; the first
// non-digit (or end of format) stops the scan.
func (s *formatState) scanNum() {
	for ; s.nbytes < len(s.format); s.nbytes++ {
		c := s.format[s.nbytes]
		if c < '0' || '9' < c {
			return
		}
	}
}
// parseIndex scans an index expression. It returns false if there is a syntax error.
func (s *formatState) parseIndex() bool {
if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' {
return true
}
// Argument index present.
s.indexed = true
s.nbytes++ // skip '['
start := s.nbytes
s.scanNum()
if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' {
s.file.Badf(s.call.Pos(), "illegal syntax for printf argument index")
return false
}
arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
if err != nil {
s.file.Badf(s.call.Pos(), "illegal syntax for printf argument index: %s", err)
return false
}
s.nbytes++ // skip ']'
arg := int(arg32)
arg += s.firstArg - 1 // We want to zero-index the actual arguments.
s.argNum = arg
s.indexPending = true
return true
}
// parseNum scans a width or precision (or *). It returns false if there's a bad index expression.
func (s *formatState) parseNum() bool {
if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' {
if s.indexPending { // Absorb it.
s.indexPending = false
}
s.nbytes++
s.argNums = append(s.argNums, s.argNum)
s.argNum++
} else {
s.scanNum()
}
return true
}
// parsePrecision scans for a precision. It returns false if there's a bad index expression.
func (s *formatState) parsePrecision() bool {
// If there's a period, there may be a precision.
if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' {
s.flags = append(s.flags, '.') // Treat precision as a flag.
s.nbytes++
if !s.parseIndex() {
return false
}
if !s.parseNum() {
return false
}
}
return true
}
// parsePrintfVerb looks at the formatting directive that begins the format
// string and returns a formatState that encodes what the directive wants,
// without looking at the actual arguments present in the call. The result is
// nil if there is an error (already reported via Badf).
func (f *File) parsePrintfVerb(call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState {
	state := &formatState{
		format:   format,
		name:     name,
		flags:    make([]byte, 0, 5),
		argNum:   argNum,
		argNums:  make([]int, 0, 1),
		nbytes:   1, // There's guaranteed to be a percent sign.
		indexed:  false,
		firstArg: firstArg,
		file:     f,
		call:     call,
	}
	// There may be flags.
	state.parseFlags()
	// There may be an index.
	if !state.parseIndex() {
		return nil
	}
	// There may be a width.
	if !state.parseNum() {
		return nil
	}
	// There may be a precision.
	if !state.parsePrecision() {
		return nil
	}
	// Now a verb, possibly prefixed by an index (which we may already have).
	// A dead local "indexPending" (always false, so it never affected this
	// condition) was removed; parseIndex is a no-op unless the next byte is
	// '[', so calling it unconditionally here is safe.
	if !state.parseIndex() {
		return nil
	}
	if state.nbytes == len(state.format) {
		f.Badf(call.Pos(), "missing verb at end of format string in %s call", name)
		return nil
	}
	verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
	state.verb = verb
	state.nbytes += w
	if verb != '%' {
		// "%%" consumes no argument; every other verb claims the current one.
		state.argNums = append(state.argNums, state.argNum)
	}
	// Trim the stored format to exactly this directive.
	state.format = state.format[:state.nbytes]
	return state
}
// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask.
type printfArgType int
const (
argBool printfArgType = 1 << iota
argInt
argRune
argString
argFloat
argComplex
argPointer
anyType printfArgType = ^0
)
type printVerb struct {
verb rune // User may provide verb through Formatter; could be a rune.
flags string // known flags are all ASCII
typ printfArgType
}
// Common flag sets for printf verbs.
const (
noFlag = ""
numFlag = " -+.0"
sharpNumFlag = " -+.0#"
allFlags = " -+.0#"
)
// printVerbs identifies which flags are known to printf for each verb.
// TODO: A type that implements Formatter may do what it wants, and vet
// will complain incorrectly.
var printVerbs = []printVerb{
// '-' is a width modifier, always valid.
// '.' is a precision for float, max width for strings.
// '+' is required sign for numbers, Go format for %v.
// '#' is alternate format for several verbs.
// ' ' is spacer for numbers
{'%', noFlag, 0},
{'b', numFlag, argInt | argFloat | argComplex},
{'c', "-", argRune | argInt},
{'d', numFlag, argInt},
{'e', numFlag, argFloat | argComplex},
{'E', numFlag, argFloat | argComplex},
{'f', numFlag, argFloat | argComplex},
{'F', numFlag, argFloat | argComplex},
{'g', numFlag, argFloat | argComplex},
{'G', numFlag, argFloat | argComplex},
{'o', sharpNumFlag, argInt},
{'p', "-#", argPointer},
{'q', " -+.0#", argRune | argInt | argString},
{'s', " -+.0", argString},
{'t', "-", argBool},
{'T', "-", anyType},
{'U', "-#", argRune | argInt},
{'v', allFlags, anyType},
{'x', sharpNumFlag, argRune | argInt | argString},
{'X', sharpNumFlag, argRune | argInt | argString},
}
// okPrintfArg compares the formatState to the arguments actually present,
// reporting any discrepancies it can discern. If the final argument is ellipsissed,
// there's little it can do for that.
func (f *File) okPrintfArg(call *ast.CallExpr, state *formatState) (ok bool) {
var v printVerb
found := false
// Linear scan is fast enough for a small list.
for _, v = range printVerbs {
if v.verb == state.verb {
found = true
break
}
}
if !found {
f.Badf(call.Pos(), "unrecognized printf verb %q", state.verb)
return false
}
for _, flag := range state.flags {
if !strings.ContainsRune(v.flags, rune(flag)) {
f.Badf(call.Pos(), "unrecognized printf flag for verb %q: %q", state.verb, flag)
return false
}
}
// Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all
// but the final arg must be an integer.
trueArgs := 1
if state.verb == '%' {
trueArgs = 0
}
nargs := len(state.argNums)
for i := 0; i < nargs-trueArgs; i++ {
argNum := state.argNums[i]
if !f.argCanBeChecked(call, i, true, state) {
return
}
arg := call.Args[argNum]
if !f.matchArgType(argInt, nil, arg) {
f.Badf(call.Pos(), "arg %s for * in printf format not of type int", f.gofmt(arg))
return false
}
}
if state.verb == '%' {
return true
}
argNum := state.argNums[len(state.argNums)-1]
if !f.argCanBeChecked(call, len(state.argNums)-1, false, state) {
return false
}
arg := call.Args[argNum]
if f.isFunctionValue(arg) && state.verb != 'p' && state.verb != 'T' {
f.Badf(call.Pos(), "arg %s in printf call is a function value, not a function call", f.gofmt(arg))
return false
}
if !f.matchArgType(v.typ, nil, arg) {
typeString := ""
if typ := f.pkg.types[arg].Type; typ != nil {
typeString = typ.String()
}
f.Badf(call.Pos(), "arg %s for printf verb %%%c of wrong type: %s", f.gofmt(arg), state.verb, typeString)
return false
}
if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) && f.recursiveStringer(arg) {
f.Badf(call.Pos(), "arg %s for printf causes recursive call to String method", f.gofmt(arg))
return false
}
return true
}
// recursiveStringer reports whether the provided argument is r or &r for the
// fmt.Stringer receiver identifier r.
func (f *File) recursiveStringer(e ast.Expr) bool {
	// Fast path: no String-method receivers were recorded for this file.
	if len(f.stringers) == 0 {
		return false
	}
	// Resolve the argument to the *ast.Object it denotes: either the bare
	// identifier, or the identifier under a unary & (address-of).
	var obj *ast.Object
	switch e := e.(type) {
	case *ast.Ident:
		obj = e.Obj
	case *ast.UnaryExpr:
		if id, ok := e.X.(*ast.Ident); ok && e.Op == token.AND {
			obj = id.Obj
		}
	}
	// It's unlikely to be a recursive stringer if it has a Format method.
	if typ := f.pkg.types[e].Type; typ != nil {
		// Not a perfect match; see issue 6259.
		if f.hasMethod(typ, "Format") {
			return false
		}
	}
	// We compare the underlying Object, which checks that the identifier
	// is the one we declared as the receiver for the String method in
	// which this printf appears.
	// A nil obj simply misses the map and yields false.
	return f.stringers[obj]
}
// isFunctionValue reports whether the expression is a function as opposed to
// a function call. It is almost always a mistake to print a function value.
func (f *File) isFunctionValue(e ast.Expr) bool {
	typ := f.pkg.types[e].Type
	if typ == nil {
		// No recorded type information; assume it is not a function.
		return false
	}
	_, isSig := typ.(*types.Signature)
	return isSig
}
// argCanBeChecked reports whether the specified argument is statically present;
// it may be beyond the list of arguments or in a terminal slice... argument, which
// means we can't see it.
// NOTE(review): isStar is not used in this body — confirm it is intentionally unused.
func (f *File) argCanBeChecked(call *ast.CallExpr, formatArg int, isStar bool, state *formatState) bool {
	argNum := state.argNums[formatArg]
	if argNum < 0 {
		// Shouldn't happen, so catch it with prejudice.
		panic("negative arg num")
	}
	if argNum == 0 {
		// Explicit argument indexes like %[0]d are 1-based; 0 is always a user error.
		f.Badf(call.Pos(), `index value [0] for %s("%s"); indexes start at 1`, state.name, state.format)
		return false
	}
	if argNum < len(call.Args)-1 {
		return true // Always OK.
	}
	if call.Ellipsis.IsValid() {
		return false // We just can't tell; there could be many more arguments.
	}
	if argNum < len(call.Args) {
		return true
	}
	// There are bad indexes in the format or there are fewer arguments than the format needs.
	// This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
	arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
	f.Badf(call.Pos(), `missing argument for %s("%s"): format reads arg %d, have only %d args`, state.name, state.format, arg, len(call.Args)-state.firstArg)
	return false
}
// checkPrint checks a call to an unformatted print routine such as Println.
// It flags likely mistakes: an os.Std* stream as first argument, a literal
// containing "%", a trailing newline passed to a ...ln function, and
// function-value or recursive-Stringer arguments.
func (f *File) checkPrint(call *ast.CallExpr, name string) {
	firstArg := 0
	typ := f.pkg.types[call.Fun].Type
	if typ == nil {
		// Skip checking functions with unknown type.
		return
	}
	if sig, ok := typ.(*types.Signature); ok {
		if !sig.Variadic() {
			// Skip checking non-variadic functions.
			return
		}
		params := sig.Params()
		firstArg = params.Len() - 1
		typ := params.At(firstArg).Type()
		typ = typ.(*types.Slice).Elem()
		it, ok := typ.(*types.Interface)
		if !ok || !it.Empty() {
			// Skip variadic functions accepting non-interface{} args.
			return
		}
	}
	args := call.Args
	if len(args) <= firstArg {
		// Skip calls without variadic args.
		return
	}
	args = args[firstArg:]
	// check for Println(os.Stderr, ...)
	if firstArg == 0 {
		if sel, ok := args[0].(*ast.SelectorExpr); ok {
			if x, ok := sel.X.(*ast.Ident); ok {
				// os.Stdout/os.Stderr here suggests the caller meant Fprintln.
				if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") {
					f.Badf(call.Pos(), "first argument to %s is %s.%s", name, x.Name, sel.Sel.Name)
				}
			}
		}
	}
	arg := args[0]
	if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
		// A "%" in an unformatted print usually means the formatted variant was intended.
		if strings.Contains(lit.Value, "%") {
			f.Badf(call.Pos(), "possible formatting directive in %s call", name)
		}
	}
	if strings.HasSuffix(name, "ln") {
		// The last item, if a string, should not have a newline.
		arg = args[len(args)-1]
		if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
			if strings.HasSuffix(lit.Value, `\n"`) {
				f.Badf(call.Pos(), "%s call ends with newline", name)
			}
		}
	}
	for _, arg := range args {
		if f.isFunctionValue(arg) {
			f.Badf(call.Pos(), "arg %s in %s call is a function value, not a function call", f.gofmt(arg), name)
		}
		if f.recursiveStringer(arg) {
			f.Badf(call.Pos(), "arg %s in %s call causes recursive call to String method", f.gofmt(arg), name)
		}
	}
}

74
tools/vendor/github.com/dnephin/govet/rangeloop.go generated vendored Normal file
View File

@@ -0,0 +1,74 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check range loop variables bound inside function
literals that are deferred or launched in new goroutines. We only check
instances where the defer or go statement is the last statement in the loop
body, as otherwise we would need whole program analysis.
For example:
for i, v := range s {
go func() {
println(i, v) // not what you might expect
}()
}
See: https://golang.org/doc/go_faq.html#closures_and_goroutines
*/
package main
import "go/ast"
// init registers the rangeloops check with the vet driver, restricted to
// range statements.
func init() {
	register("rangeloops",
		"check that range loop variables are used correctly",
		checkRangeLoop,
		rangeStmt)
}
// checkRangeLoop walks the body of the provided range statement, checking if
// its index or value variables are used unsafely inside goroutines or deferred
// function literals.
func checkRangeLoop(f *File, node ast.Node) {
	n := node.(*ast.RangeStmt)
	key, _ := n.Key.(*ast.Ident)
	val, _ := n.Value.(*ast.Ident)
	if key == nil && val == nil {
		// No loop variables to capture.
		return
	}
	sl := n.Body.List
	if len(sl) == 0 {
		return
	}
	// Only the last statement of the loop body is considered (per the file
	// comment, anything else would need whole-program analysis).
	var last *ast.CallExpr
	switch s := sl[len(sl)-1].(type) {
	case *ast.GoStmt:
		last = s.Call
	case *ast.DeferStmt:
		last = s.Call
	default:
		return
	}
	lit, ok := last.Fun.(*ast.FuncLit)
	if !ok {
		// The go/defer call is not a function literal, so it cannot
		// capture the loop variables by closure.
		return
	}
	ast.Inspect(lit.Body, func(n ast.Node) bool {
		id, ok := n.(*ast.Ident)
		if !ok || id.Obj == nil {
			return true
		}
		if f.pkg.types[id].Type == nil {
			// Not referring to a variable
			return true
		}
		// Report identifiers resolving to the range key or value object.
		if key != nil && id.Obj == key.Obj || val != nil && id.Obj == val.Obj {
			f.Bad(id.Pos(), "range variable", id.Name, "captured by func literal")
		}
		return true
	})
}

246
tools/vendor/github.com/dnephin/govet/shadow.go generated vendored Normal file
View File

@@ -0,0 +1,246 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for shadowed variables.
A shadowed variable is a variable declared in an inner scope
with the same name and type as a variable in an outer scope,
and where the outer variable is mentioned after the inner one
is declared.
(This definition can be refined; the module generates too many
false positives and is not yet enabled by default.)
For example:
func BadRead(f *os.File, buf []byte) error {
var err error
for {
n, err := f.Read(buf) // shadows the function variable 'err'
if err != nil {
break // causes return of wrong value
}
foo(buf)
}
return err
}
*/
package main
import (
"flag"
"go/ast"
"go/token"
"go/types"
)
// strictShadowing reports every shadowing, not only shadowing inside the span
// where the outer variable is actually mentioned.
var strictShadowing = flag.Bool("shadowstrict", false, "whether to be strict about shadowing; can be noisy")

// init registers the shadow check; it is experimental and must be enabled
// explicitly on the command line.
func init() {
	register("shadow",
		"check for shadowed variables (experimental; must be set explicitly)",
		checkShadow,
		assignStmt, genDecl)
	experimental["shadow"] = true
}

// checkShadow dispatches to the checker for the node kind: short variable
// declarations (:=) and general var declarations.
func checkShadow(f *File, node ast.Node) {
	switch n := node.(type) {
	case *ast.AssignStmt:
		checkShadowAssignment(f, n)
	case *ast.GenDecl:
		checkShadowDecl(f, n)
	}
}
// Span stores the minimum range of byte positions in the file in which a
// given variable (types.Object) is mentioned. It is lexically defined: it spans
// from the beginning of its first mention to the end of its last mention.
// A variable is considered shadowed (if *strictShadowing is off) only if the
// shadowing variable is declared within the span of the shadowed variable.
// In other words, if a variable is shadowed but not used after the shadowed
// variable is declared, it is inconsequential and not worth complaining about.
// This simple check dramatically reduces the nuisance rate for the shadowing
// check, at least until something cleverer comes along.
//
// One wrinkle: A "naked return" is a silent use of a variable that the Span
// will not capture, but the compilers catch naked returns of shadowed
// variables so we don't need to.
//
// Cases this gets wrong (TODO):
// - If a for loop's continuation statement mentions a variable redeclared in
// the block, we should complain about it but don't.
// - A variable declared inside a function literal can falsely be identified
// as shadowing a variable in the outer function.
//
type Span struct {
	min token.Pos // start of the object's first mention
	max token.Pos // end of the object's last mention
}

// contains reports whether the position is inside the span.
// The interval is half-open: [min, max).
func (s Span) contains(pos token.Pos) bool {
	return s.min <= pos && pos < s.max
}
// growSpan expands the span for the object to contain the instance represented
// by the identifier.
func (pkg *Package) growSpan(ident *ast.Ident, obj types.Object) {
	if *strictShadowing {
		return // No need - strict mode does not consult spans.
	}
	pos := ident.Pos()
	end := ident.End()
	span, ok := pkg.spans[obj]
	if ok {
		// Widen the existing span to include this mention.
		if span.min > pos {
			span.min = pos
		}
		if span.max < end {
			span.max = end
		}
	} else {
		// First mention of obj: the span is exactly this identifier.
		span = Span{pos, end}
	}
	pkg.spans[obj] = span
}
// checkShadowAssignment checks for shadowing in a short variable declaration.
func checkShadowAssignment(f *File, a *ast.AssignStmt) {
	if a.Tok != token.DEFINE {
		// Only := introduces (and can therefore shadow) new variables.
		return
	}
	if f.idiomaticShortRedecl(a) {
		// Deliberate redeclarations like "i := i" are fine.
		return
	}
	for _, expr := range a.Lhs {
		ident, ok := expr.(*ast.Ident)
		if !ok {
			f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
			return
		}
		checkShadowing(f, ident)
	}
}
// idiomaticShortRedecl reports whether this short declaration can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
	// Don't complain about deliberate redeclarations of the form
	//	i := i
	// Such constructs are idiomatic in range loops to create a new variable
	// for each iteration. Another example is
	//	switch n := n.(type)
	if len(a.Rhs) != len(a.Lhs) {
		return false
	}
	// We know it's an assignment, so the LHS must be all identifiers. (We check anyway.)
	for i, expr := range a.Lhs {
		lhs, ok := expr.(*ast.Ident)
		if !ok {
			f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
			return true // Don't do any more processing.
		}
		switch rhs := a.Rhs[i].(type) {
		case *ast.Ident:
			// i := i — idiomatic only if the names match.
			if lhs.Name != rhs.Name {
				return false
			}
		case *ast.TypeAssertExpr:
			// n := n.(type) / n.(T) — idiomatic only if the names match.
			if id, ok := rhs.X.(*ast.Ident); ok {
				if lhs.Name != id.Name {
					return false
				}
			}
		default:
			return false
		}
	}
	return true
}
// idiomaticRedecl reports whether this declaration spec can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are
// deliberate, e.g. "var i, j = i, j".
func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool {
	if len(d.Names) != len(d.Values) {
		// Mismatched counts cannot be a simple self-redeclaration.
		return false
	}
	for i, name := range d.Names {
		// Only identifier initializers are compared; other expression
		// kinds are left alone by this heuristic.
		if rhs, isIdent := d.Values[i].(*ast.Ident); isIdent && name.Name != rhs.Name {
			return false
		}
	}
	return true
}
// checkShadowDecl checks for shadowing in a general variable declaration.
func checkShadowDecl(f *File, d *ast.GenDecl) {
	if d.Tok != token.VAR {
		// Only var declarations introduce variables.
		return
	}
	for _, spec := range d.Specs {
		valueSpec, ok := spec.(*ast.ValueSpec)
		if !ok {
			f.Badf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
			return
		}
		// Don't complain about deliberate redeclarations of the form
		//	var i = i
		// NOTE(review): this returns rather than continues, so one idiomatic
		// spec stops checking the remaining specs of the decl — confirm intended.
		if f.idiomaticRedecl(valueSpec) {
			return
		}
		for _, ident := range valueSpec.Names {
			checkShadowing(f, ident)
		}
	}
}
// checkShadowing checks whether the identifier shadows an identifier in an outer scope.
func checkShadowing(f *File, ident *ast.Ident) {
	if ident.Name == "_" {
		// Can't shadow the blank identifier.
		return
	}
	obj := f.pkg.defs[ident]
	if obj == nil {
		return
	}
	// obj.Parent.Parent is the surrounding scope. If we can find another declaration
	// starting from there, we have a shadowed identifier.
	_, shadowed := obj.Parent().Parent().LookupParent(obj.Name(), obj.Pos())
	if shadowed == nil {
		return
	}
	// Don't complain if it's shadowing a universe-declared identifier; that's fine.
	if shadowed.Parent() == types.Universe {
		return
	}
	if *strictShadowing {
		// The shadowed identifier must appear before this one to be an instance of shadowing.
		if shadowed.Pos() > ident.Pos() {
			return
		}
	} else {
		// Don't complain if the span of validity of the shadowed identifier doesn't include
		// the shadowing identifier.
		span, ok := f.pkg.spans[shadowed]
		if !ok {
			f.Badf(ident.Pos(), "internal error: no range for %q", ident.Name)
			return
		}
		if !span.contains(ident.Pos()) {
			return
		}
	}
	// Don't complain if the types differ: that implies the programmer really wants two different things.
	if types.Identical(obj.Type(), shadowed.Type()) {
		f.Badf(ident.Pos(), "declaration of %q shadows declaration at %s", obj.Name(), f.loc(shadowed.Pos()))
	}
}

82
tools/vendor/github.com/dnephin/govet/shift.go generated vendored Normal file
View File

@@ -0,0 +1,82 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for suspicious shifts.
*/
package main
import (
"go/ast"
"go/constant"
"go/token"
"go/types"
)
// init registers the shift check for binary expressions and assignments.
func init() {
	register("shift",
		"check for useless shifts",
		checkShift,
		binaryExpr, assignStmt)
}
// checkShift dispatches shift-width checking for plain shifts (x << y, x >> y)
// and shift-assignments (x <<= y, x >>= y) with exactly one operand per side.
func checkShift(f *File, node ast.Node) {
	if be, ok := node.(*ast.BinaryExpr); ok {
		if be.Op == token.SHL || be.Op == token.SHR {
			checkLongShift(f, be, be.X, be.Y)
		}
		return
	}
	if as, ok := node.(*ast.AssignStmt); ok {
		if len(as.Lhs) == 1 && len(as.Rhs) == 1 &&
			(as.Tok == token.SHL_ASSIGN || as.Tok == token.SHR_ASSIGN) {
			checkLongShift(f, as, as.Lhs[0], as.Rhs[0])
		}
	}
}
// checkLongShift checks if shift or shift-assign operations shift by more than
// the length of the underlying variable.
func checkLongShift(f *File, node ast.Node, x, y ast.Expr) {
	// The shift amount must be a compile-time constant to be checkable.
	v := f.pkg.types[y].Value
	if v == nil {
		return
	}
	amt, ok := constant.Int64Val(v)
	if !ok {
		return
	}
	t := f.pkg.types[x].Type
	if t == nil {
		return
	}
	b, ok := t.Underlying().(*types.Basic)
	if !ok {
		return
	}
	// Map the basic kind to its bit width; platform-sized types use the
	// smallest width they can have (32) and hedge the message.
	var size int64
	var msg string
	switch b.Kind() {
	case types.Uint8, types.Int8:
		size = 8
	case types.Uint16, types.Int16:
		size = 16
	case types.Uint32, types.Int32:
		size = 32
	case types.Uint64, types.Int64:
		size = 64
	case types.Int, types.Uint, types.Uintptr:
		// These types may be as small as 32 bits, but no smaller.
		size = 32
		msg = "might be "
	default:
		return
	}
	if amt >= size {
		ident := f.gofmt(x)
		f.Badf(node.Pos(), "%s %stoo small for shift of %d", ident, msg, amt)
	}
}

122
tools/vendor/github.com/dnephin/govet/structtag.go generated vendored Normal file
View File

@@ -0,0 +1,122 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the test for canonical struct tags.
package main
import (
"errors"
"go/ast"
"reflect"
"strconv"
)
// init registers the structtags check, applied to struct fields.
func init() {
	register("structtags",
		"check that struct field tags have canonical format and apply to exported fields as needed",
		checkCanonicalFieldTag,
		field)
}
// checkCanonicalFieldTag checks a struct field tag.
func checkCanonicalFieldTag(f *File, node ast.Node) {
	field := node.(*ast.Field)
	if field.Tag == nil {
		// Nothing to validate.
		return
	}
	// The tag literal is quoted source text; unquote it before parsing.
	tag, err := strconv.Unquote(field.Tag.Value)
	if err != nil {
		f.Badf(field.Pos(), "unable to read struct tag %s", field.Tag.Value)
		return
	}
	if err := validateStructTag(tag); err != nil {
		f.Badf(field.Pos(), "struct field tag %s not compatible with reflect.StructTag.Get: %s", field.Tag.Value, err)
	}
	// Check for use of json or xml tags with unexported fields.
	// Embedded struct. Nothing to do for now, but that
	// may change, depending on what happens with issue 7363.
	if len(field.Names) == 0 {
		return
	}
	if field.Names[0].IsExported() {
		return
	}
	// An encoding tag on an unexported field is useless: encoders skip it.
	st := reflect.StructTag(tag)
	for _, enc := range [...]string{"json", "xml"} {
		if st.Get(enc) != "" {
			f.Badf(field.Pos(), "struct field %s has %s tag but is not exported", field.Names[0].Name, enc)
			return
		}
	}
}
// Errors returned by validateStructTag.
var (
	errTagSyntax      = errors.New("bad syntax for struct tag pair")
	errTagKeySyntax   = errors.New("bad syntax for struct tag key")
	errTagValueSyntax = errors.New("bad syntax for struct tag value")
)

// validateStructTag parses the struct tag and returns an error if it is not
// in the canonical format, which is a space-separated list of key:"value"
// settings. The value may contain spaces.
// The scanner mirrors reflect.StructTag.Get.
func validateStructTag(tag string) error {
	for {
		// Leading spaces separate key:"value" pairs and are ignored.
		for len(tag) > 0 && tag[0] == ' ' {
			tag = tag[1:]
		}
		if tag == "" {
			return nil
		}
		// The key runs up to the colon and may not contain spaces, quotes
		// or (single-byte) control characters; multi-byte control
		// characters are deliberately not inspected.
		key := 0
		for key < len(tag) && tag[key] > ' ' && tag[key] != ':' && tag[key] != '"' && tag[key] != 0x7f {
			key++
		}
		switch {
		case key == 0:
			return errTagKeySyntax
		case key+1 >= len(tag) || tag[key] != ':':
			return errTagSyntax
		case tag[key+1] != '"':
			return errTagValueSyntax
		}
		// Drop the key and colon; tag now starts at the opening quote.
		tag = tag[key+1:]
		// Find the closing quote, honoring backslash escapes.
		end := 1
		for end < len(tag) && tag[end] != '"' {
			if tag[end] == '\\' {
				end++
			}
			end++
		}
		if end >= len(tag) {
			return errTagValueSyntax
		}
		quoted := tag[:end+1]
		tag = tag[end+1:]
		// The quoted value must be a legal Go string literal.
		if _, err := strconv.Unquote(quoted); err != nil {
			return errTagValueSyntax
		}
	}
}

187
tools/vendor/github.com/dnephin/govet/tests.go generated vendored Normal file
View File

@@ -0,0 +1,187 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"go/ast"
"go/types"
"strings"
"unicode"
"unicode/utf8"
)
// init registers the tests check, applied to function declarations.
func init() {
	register("tests",
		"check for common mistaken usages of tests/documentation examples",
		checkTestFunctions,
		funcDecl)
}
// isExampleSuffix reports whether s is a valid Example name suffix:
// non-empty and starting with a lower-case rune.
func isExampleSuffix(s string) bool {
	first, n := utf8.DecodeRuneInString(s)
	if n == 0 {
		// Empty string: no rune to inspect.
		return false
	}
	return unicode.IsLower(first)
}
// isTestSuffix reports whether name is acceptable after a Test/Benchmark
// prefix: empty ("Test" alone) or not starting with a lower-case rune.
func isTestSuffix(name string) bool {
	if name == "" {
		// "Test" is ok.
		return true
	}
	first, _ := utf8.DecodeRuneInString(name)
	return !unicode.IsLower(first)
}
func isTestParam(typ ast.Expr, wantType string) bool {
ptr, ok := typ.(*ast.StarExpr)
if !ok {
// Not a pointer.
return false
}
// No easy way of making sure it's a *testing.T or *testing.B:
// ensure the name of the type matches.
if name, ok := ptr.X.(*ast.Ident); ok {
return name.Name == wantType
}
if sel, ok := ptr.X.(*ast.SelectorExpr); ok {
return sel.Sel.Name == wantType
}
return false
}
func lookup(name string, scopes []*types.Scope) types.Object {
for _, scope := range scopes {
if o := scope.Lookup(name); o != nil {
return o
}
}
return nil
}
// extendedScope returns the scopes to search for identifiers referenced by
// Example functions: the file's own package scope plus, for a _test package,
// the scope of the package under test.
func extendedScope(f *File) []*types.Scope {
	scopes := []*types.Scope{f.pkg.typesPkg.Scope()}
	if f.basePkg != nil {
		scopes = append(scopes, f.basePkg.typesPkg.Scope())
	} else {
		// If basePkg is not specified (e.g. when checking a single file) try to
		// find it among imports.
		pkgName := f.pkg.typesPkg.Name()
		if strings.HasSuffix(pkgName, "_test") {
			basePkgName := strings.TrimSuffix(pkgName, "_test")
			for _, p := range f.pkg.typesPkg.Imports() {
				if p.Name() == basePkgName {
					scopes = append(scopes, p.Scope())
					break
				}
			}
		}
	}
	return scopes
}
// checkExample validates an ExampleXxx function declaration: it must be
// niladic, return nothing, and its name must refer to an existing identifier
// (and optionally an existing field/method) with correctly-cased suffixes.
func checkExample(fn *ast.FuncDecl, f *File, report reporter) {
	fnName := fn.Name.Name
	if params := fn.Type.Params; len(params.List) != 0 {
		report("%s should be niladic", fnName)
	}
	if results := fn.Type.Results; results != nil && len(results.List) != 0 {
		report("%s should return nothing", fnName)
	}
	if filesRun && !includesNonTest {
		// The coherence checks between a test and the package it tests
		// will report false positives if no non-test files have
		// been provided.
		return
	}
	if fnName == "Example" {
		// Nothing more to do.
		return
	}
	// Split ExampleIdent_Member_suffix into at most three parts.
	var (
		exName = strings.TrimPrefix(fnName, "Example")
		elems  = strings.SplitN(exName, "_", 3)
		ident  = elems[0]
		obj    = lookup(ident, extendedScope(f))
	)
	if ident != "" && obj == nil {
		// Check ExampleFoo and ExampleBadFoo.
		report("%s refers to unknown identifier: %s", fnName, ident)
		// Abort since obj is absent and no subsequent checks can be performed.
		return
	}
	if len(elems) < 2 {
		// Nothing more to do.
		return
	}
	if ident == "" {
		// Check Example_suffix and Example_BadSuffix.
		if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) {
			report("%s has malformed example suffix: %s", fnName, residual)
		}
		return
	}
	mmbr := elems[1]
	if !isExampleSuffix(mmbr) {
		// Check ExampleFoo_Method and ExampleFoo_BadMethod.
		if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj == nil {
			report("%s refers to unknown field or method: %s.%s", fnName, ident, mmbr)
		}
	}
	if len(elems) == 3 && !isExampleSuffix(elems[2]) {
		// Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix.
		report("%s has malformed example suffix: %s", fnName, elems[2])
	}
}
// checkTest validates a TestXxx or BenchmarkXxx declaration: the signature
// must be func(x *testing.T) / func(x *testing.B) and the suffix after the
// prefix must not start with a lower-case letter.
func checkTest(fn *ast.FuncDecl, prefix string, report reporter) {
	// Want functions with 0 results and 1 parameter.
	if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
		fn.Type.Params == nil ||
		len(fn.Type.Params.List) != 1 ||
		len(fn.Type.Params.List[0].Names) > 1 {
		return
	}
	// The param must look like a *testing.T or *testing.B.
	// prefix[:1] is "T" for "Test" and "B" for "Benchmark".
	if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) {
		return
	}
	if !isTestSuffix(fn.Name.Name[len(prefix):]) {
		report("%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
	}
}

// reporter emits a diagnostic with Printf-style formatting.
type reporter func(format string, args ...interface{})
// checkTestFunctions walks Test, Benchmark and Example functions checking
// malformed names, wrong signatures and examples documenting inexistent
// identifiers.
func checkTestFunctions(f *File, node ast.Node) {
	if !strings.HasSuffix(f.name, "_test.go") {
		// Only test files are checked.
		return
	}
	fn, ok := node.(*ast.FuncDecl)
	if !ok || fn.Recv != nil {
		// Ignore non-functions or functions with receivers.
		return
	}
	// All diagnostics point at the function declaration.
	report := func(format string, args ...interface{}) { f.Badf(node.Pos(), format, args...) }
	switch {
	case strings.HasPrefix(fn.Name.Name, "Example"):
		checkExample(fn, f, report)
	case strings.HasPrefix(fn.Name.Name, "Test"):
		checkTest(fn, "Test", report)
	case strings.HasPrefix(fn.Name.Name, "Benchmark"):
		checkTest(fn, "Benchmark", report)
	}
}

281
tools/vendor/github.com/dnephin/govet/types.go generated vendored Normal file
View File

@@ -0,0 +1,281 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the pieces of the tool that use typechecking from the go/types package.
package main
import (
"go/ast"
"go/importer"
"go/token"
"go/types"
)
// stdImporter is the importer we use to import packages.
// It is created during initialization so that all packages
// are imported by the same importer.
var stdImporter = importer.Default()

// Interface types resolved once at startup and consulted by the printf checks.
var (
	errorType     *types.Interface
	stringerType  *types.Interface // possibly nil
	formatterType *types.Interface // possibly nil
)

// init resolves the error, fmt.Stringer and fmt.Formatter interfaces.
// The fmt-based ones stay nil if the fmt package cannot be imported.
func init() {
	errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
	if typ := importType("fmt", "Stringer"); typ != nil {
		stringerType = typ.Underlying().(*types.Interface)
	}
	if typ := importType("fmt", "Formatter"); typ != nil {
		formatterType = typ.Underlying().(*types.Interface)
	}
}
// importType returns the type denoted by the qualified identifier
// path.name, and adds the respective package to the imports map
// as a side effect. In case of an error, importType returns nil.
func importType(path, name string) types.Type {
	pkg, err := stdImporter.Import(path)
	if err != nil {
		// This can happen if the package at path hasn't been compiled yet.
		warnf("import failed: %v", err)
		return nil
	}
	// The name must denote a type, not a value.
	if obj, ok := pkg.Scope().Lookup(name).(*types.TypeName); ok {
		return obj.Type()
	}
	warnf("invalid type name %q", name)
	return nil
}
// check typechecks the package's files and populates the maps the individual
// vet checks consult (defs, uses, selections, spans, types). The returned
// error is the first type error, if any; checking continues past errors.
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) error {
	pkg.defs = make(map[*ast.Ident]types.Object)
	pkg.uses = make(map[*ast.Ident]types.Object)
	pkg.selectors = make(map[*ast.SelectorExpr]*types.Selection)
	pkg.spans = make(map[types.Object]Span)
	pkg.types = make(map[ast.Expr]types.TypeAndValue)
	config := types.Config{
		// We use the same importer for all imports to ensure that
		// everybody sees identical packages for the given paths.
		Importer: stdImporter,
		// By providing a Config with our own error function, it will continue
		// past the first error. There is no need for that function to do anything.
		Error: func(error) {},
	}
	info := &types.Info{
		Selections: pkg.selectors,
		Types:      pkg.types,
		Defs:       pkg.defs,
		Uses:       pkg.uses,
	}
	typesPkg, err := config.Check(pkg.path, fs, astFiles, info)
	pkg.typesPkg = typesPkg
	// update spans
	for id, obj := range pkg.defs {
		pkg.growSpan(id, obj)
	}
	for id, obj := range pkg.uses {
		pkg.growSpan(id, obj)
	}
	return err
}
// matchArgType reports an error if printf verb t is not appropriate
// for operand arg.
//
// typ is used only for recursive calls; external callers must supply nil.
//
// (Recursion arises from the compound types {map,chan,slice} which
// may be printed with %d etc. if that is appropriate for their element
// types.)
func (f *File) matchArgType(t printfArgType, typ types.Type, arg ast.Expr) bool {
	// The fresh map tracks in-progress types to cut off recursive types.
	return f.matchArgTypeInternal(t, typ, arg, make(map[types.Type]bool))
}
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
// remembering what types are in progress so we don't recur when faced with recursive
// types or mutually recursive types.
func (f *File) matchArgTypeInternal(t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
	// %v, %T accept any argument type.
	if t == anyType {
		return true
	}
	if typ == nil {
		// external call
		typ = f.pkg.types[arg].Type
		if typ == nil {
			return true // probably a type check problem
		}
	}
	// If the type implements fmt.Formatter, we have nothing to check.
	// formatterTyp may be nil - be conservative and check for Format method in that case.
	if formatterType != nil && types.Implements(typ, formatterType) || f.hasMethod(typ, "Format") {
		return true
	}
	// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
	if t&argString != 0 {
		if types.AssertableTo(errorType, typ) || stringerType != nil && types.AssertableTo(stringerType, typ) {
			return true
		}
	}
	typ = typ.Underlying()
	if inProgress[typ] {
		// We're already looking at this type. The call that started it will take care of it.
		return true
	}
	inProgress[typ] = true
	switch typ := typ.(type) {
	case *types.Signature:
		// Function values print meaningfully only as pointers.
		return t&argPointer != 0
	case *types.Map:
		// Recur: map[int]int matches %d.
		return t&argPointer != 0 ||
			(f.matchArgTypeInternal(t, typ.Key(), arg, inProgress) && f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress))
	case *types.Chan:
		return t&argPointer != 0
	case *types.Array:
		// Same as slice.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem().Underlying(), arg, inProgress)
	case *types.Slice:
		// Same as array.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d. But watch out for
		//	type T []T
		// If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)
	case *types.Pointer:
		// Ugly, but dealing with an edge case: a known pointer to an invalid type,
		// probably something from a failed import.
		if typ.Elem().String() == "invalid type" {
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", f.gofmt(arg))
			}
			return true // special case
		}
		// If it's actually a pointer with %p, it prints as one.
		if t == argPointer {
			return true
		}
		// If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
		if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
			return f.matchStructArgType(t, str, arg, inProgress)
		}
		// The rest can print with %p as pointers, or as integers with %x etc.
		return t&(argInt|argPointer) != 0
	case *types.Struct:
		return f.matchStructArgType(t, typ, arg, inProgress)
	case *types.Interface:
		// If the static type of the argument is empty interface, there's little we can do.
		// Example:
		//	func f(x interface{}) { fmt.Printf("%s", x) }
		// Whether x is valid for %s depends on the type of the argument to f. One day
		// we will be able to do better. For now, we assume that empty interface is OK
		// but non-empty interfaces, with Stringer and Error handled above, are errors.
		return typ.NumMethods() == 0
	case *types.Basic:
		switch typ.Kind() {
		case types.UntypedBool,
			types.Bool:
			return t&argBool != 0
		case types.UntypedInt,
			types.Int,
			types.Int8,
			types.Int16,
			types.Int32,
			types.Int64,
			types.Uint,
			types.Uint8,
			types.Uint16,
			types.Uint32,
			types.Uint64,
			types.Uintptr:
			return t&argInt != 0
		case types.UntypedFloat,
			types.Float32,
			types.Float64:
			return t&argFloat != 0
		case types.UntypedComplex,
			types.Complex64,
			types.Complex128:
			return t&argComplex != 0
		case types.UntypedString,
			types.String:
			return t&argString != 0
		case types.UnsafePointer:
			return t&(argPointer|argInt) != 0
		case types.UntypedRune:
			return t&(argInt|argRune) != 0
		case types.UntypedNil:
			return t&argPointer != 0 // TODO?
		case types.Invalid:
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v has invalid or unknown type", f.gofmt(arg))
			}
			return true // Probably a type check problem.
		}
		// Every basic kind is handled above; reaching here is a program bug.
		panic("unreachable")
	}
	return false
}
// hasBasicType reports whether x's type is a types.Basic with the given kind.
func (f *File) hasBasicType(x ast.Expr, kind types.BasicKind) bool {
	t := f.pkg.types[x].Type
	if t != nil {
		t = t.Underlying()
	}
	// A nil t simply fails the type assertion below (ok is false).
	b, ok := t.(*types.Basic)
	return ok && b.Kind() == kind
}
// matchStructArgType reports whether all the elements of the struct match the expected
// type. For instance, with "%d" all the elements must be printable with the "%d" format.
func (f *File) matchStructArgType(t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
	n := typ.NumFields()
	for i := 0; i < n; i++ {
		field := typ.Field(i)
		// One incompatible field makes the whole struct incompatible.
		if !f.matchArgTypeInternal(t, field.Type(), arg, inProgress) {
			return false
		}
	}
	return true
}
// hasMethod reports whether the type contains a method with the given name.
// It is part of the workaround for Formatters and should be deleted when
// that workaround is no longer necessary.
// TODO: This could be better once issue 6259 is fixed.
func (f *File) hasMethod(typ types.Type, name string) bool {
	// assume we have an addressable variable of type typ
	obj, _, _ := types.LookupFieldOrMethod(typ, true, f.pkg.typesPkg, name)
	if obj == nil {
		// Neither a field nor a method with that name.
		return false
	}
	_, isFunc := obj.(*types.Func)
	return isFunc
}

97
tools/vendor/github.com/dnephin/govet/unsafeptr.go generated vendored Normal file
View File

@@ -0,0 +1,97 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Check for invalid uintptr -> unsafe.Pointer conversions.
package main
import (
"go/ast"
"go/token"
"go/types"
)
// init registers the unsafeptr check, applied to call expressions
// (conversions are calls syntactically).
func init() {
	register("unsafeptr",
		"check for misuse of unsafe.Pointer",
		checkUnsafePointer,
		callExpr)
}
// checkUnsafePointer flags conversions of a uintptr to unsafe.Pointer that
// are not one of the recognized safe patterns (see isSafeUintptr).
func checkUnsafePointer(f *File, node ast.Node) {
	x := node.(*ast.CallExpr)
	// A conversion has exactly one argument.
	if len(x.Args) != 1 {
		return
	}
	if f.hasBasicType(x.Fun, types.UnsafePointer) && f.hasBasicType(x.Args[0], types.Uintptr) && !f.isSafeUintptr(x.Args[0]) {
		f.Badf(x.Pos(), "possible misuse of unsafe.Pointer")
	}
}
// isSafeUintptr reports whether x - already known to be a uintptr -
// is safe to convert to unsafe.Pointer. It is safe if x is itself derived
// directly from an unsafe.Pointer via conversion and pointer arithmetic
// or if x is the result of reflect.Value.Pointer or reflect.Value.UnsafeAddr
// or obtained from the Data field of a *reflect.SliceHeader or *reflect.StringHeader.
func (f *File) isSafeUintptr(x ast.Expr) bool {
	switch x := x.(type) {
	case *ast.ParenExpr:
		// Parentheses are transparent.
		return f.isSafeUintptr(x.X)
	case *ast.SelectorExpr:
		switch x.Sel.Name {
		case "Data":
			// reflect.SliceHeader and reflect.StringHeader are okay,
			// but only if they are pointing at a real slice or string.
			// It's not okay to do:
			//	var x SliceHeader
			//	x.Data = uintptr(unsafe.Pointer(...))
			//	... use x ...
			//	p := unsafe.Pointer(x.Data)
			// because in the middle the garbage collector doesn't
			// see x.Data as a pointer and so x.Data may be dangling
			// by the time we get to the conversion at the end.
			// For now approximate by saying that *Header is okay
			// but Header is not.
			pt, ok := f.pkg.types[x.X].Type.(*types.Pointer)
			if ok {
				t, ok := pt.Elem().(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" {
					switch t.Obj().Name() {
					case "StringHeader", "SliceHeader":
						return true
					}
				}
			}
		}
	case *ast.CallExpr:
		switch len(x.Args) {
		case 0:
			// maybe call to reflect.Value.Pointer or reflect.Value.UnsafeAddr.
			sel, ok := x.Fun.(*ast.SelectorExpr)
			if !ok {
				break
			}
			switch sel.Sel.Name {
			case "Pointer", "UnsafeAddr":
				t, ok := f.pkg.types[sel.X].Type.(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" {
					return true
				}
			}
		case 1:
			// maybe conversion of uintptr to unsafe.Pointer
			return f.hasBasicType(x.Fun, types.Uintptr) && f.hasBasicType(x.Args[0], types.UnsafePointer)
		}
	case *ast.BinaryExpr:
		// Pointer arithmetic: the left operand must itself be safe and
		// the right operand must NOT be a converted pointer.
		switch x.Op {
		case token.ADD, token.SUB:
			return f.isSafeUintptr(x.X) && !f.isSafeUintptr(x.Y)
		}
	}
	return false
}

93
tools/vendor/github.com/dnephin/govet/unused.go generated vendored Normal file
View File

@@ -0,0 +1,93 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file defines the check for unused results of calls to certain
// pure functions.
package main
import (
"flag"
"go/ast"
"go/token"
"go/types"
"strings"
)
// unusedFuncsFlag lists package-qualified functions whose results must
// not be discarded; user-overridable via -unusedfuncs.
var unusedFuncsFlag = flag.String("unusedfuncs",
"errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse",
"comma-separated list of functions whose results must be used")

// unusedStringMethodsFlag lists names of func() string methods whose
// results must be used; user-overridable via -unusedstringmethods.
var unusedStringMethodsFlag = flag.String("unusedstringmethods",
"Error,String",
"comma-separated list of names of methods of type func() string whose results must be used")

// init registers the "unusedresult" check, run on expression statements.
func init() {
register("unusedresult",
"check for unused result of calls to functions in -unusedfuncs list and methods in -unusedstringmethods list",
checkUnusedResult,
exprStmt)
}

// func() string
// sigNoArgsStringResult is the signature compared against candidate
// methods: no parameters, a single string result, not variadic.
var sigNoArgsStringResult = types.NewSignature(nil, nil,
types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
false)

// Parsed forms of the two flags above; populated by initUnusedFlags.
var unusedFuncs = make(map[string]bool)
var unusedStringMethods = make(map[string]bool)
// initUnusedFlags parses the -unusedfuncs and -unusedstringmethods flag
// values into their lookup maps. An empty element in either list calls
// flag.Usage (the element is still recorded, as before).
func initUnusedFlags() {
	specs := []struct {
		list string
		set  map[string]bool
	}{
		{*unusedFuncsFlag, unusedFuncs},
		{*unusedStringMethodsFlag, unusedStringMethods},
	}
	for _, spec := range specs {
		if spec.list == "" {
			continue
		}
		for _, name := range strings.Split(spec.list, ",") {
			if name == "" {
				flag.Usage()
			}
			spec.set[name] = true
		}
	}
}
// checkUnusedResult reports expression statements that discard the
// result of a listed pure function or a listed func() string method.
func checkUnusedResult(f *File, n ast.Node) {
call, ok := unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr)
if !ok {
return // not a call statement
}
fun := unparen(call.Fun)
if f.pkg.types[fun].IsType() {
return // a conversion, not a call
}
selector, ok := fun.(*ast.SelectorExpr)
if !ok {
return // neither a method call nor a qualified ident
}
sel, ok := f.pkg.selectors[selector]
if ok && sel.Kind() == types.MethodVal {
// method (e.g. foo.String())
obj := sel.Obj().(*types.Func)
sig := sel.Type().(*types.Signature)
// Only zero-arg methods returning exactly one string qualify.
if types.Identical(sig, sigNoArgsStringResult) {
if unusedStringMethods[obj.Name()] {
f.Badf(call.Lparen, "result of (%s).%s call not used",
sig.Recv().Type(), obj.Name())
}
}
} else if !ok {
// package-qualified function (e.g. fmt.Errorf)
obj, _ := f.pkg.uses[selector.Sel]
if obj, ok := obj.(*types.Func); ok {
// Match against the fully qualified "pkgpath.Name" form.
qname := obj.Pkg().Path() + "." + obj.Name()
if unusedFuncs[qname] {
f.Badf(call.Lparen, "result of %v call not used", qname)
}
}
}
}

View File

@@ -12,26 +12,36 @@ import (
"strings" "strings"
) )
var ( const invalidArgumentExitCode = 3
root string
dontRecurseFlag = flag.Bool("n", false, "don't recursively check paths") var dontRecurseFlag = flag.Bool("n", false, "don't recursively check paths")
)
func main() { func main() {
flag.Parse() flag.Parse()
if len(flag.Args()) != 1 {
if len(flag.Args()) == 0 {
fmt.Println("missing argument: filepath") fmt.Println("missing argument: filepath")
return os.Exit(invalidArgumentExitCode)
} }
var err error lintFailed := false
root, err = filepath.Abs(flag.Arg(0)) for _, path := range flag.Args() {
root, err := filepath.Abs(path)
if err != nil { if err != nil {
fmt.Printf("Error finding absolute path: %s", err) fmt.Printf("Error finding absolute path: %s", err)
return os.Exit(invalidArgumentExitCode)
}
if walkPath(root) {
lintFailed = true
}
}
if lintFailed {
os.Exit(1)
}
} }
errors := false func walkPath(root string) bool {
lintFailed := false
filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
if err != nil { if err != nil {
fmt.Printf("Error during filesystem walk: %v\n", err) fmt.Printf("Error during filesystem walk: %v\n", err)
@@ -51,13 +61,11 @@ func main() {
fset, _, ineff := checkPath(path) fset, _, ineff := checkPath(path)
for _, id := range ineff { for _, id := range ineff {
fmt.Printf("%s: ineffectual assignment to %s\n", fset.Position(id.Pos()), id.Name) fmt.Printf("%s: ineffectual assignment to %s\n", fset.Position(id.Pos()), id.Name)
errors = true lintFailed = true
} }
return nil return nil
}) })
if errors { return lintFailed
os.Exit(1)
}
} }
func checkPath(path string) (*token.FileSet, []*ast.CommentGroup, []*ast.Ident) { func checkPath(path string) (*token.FileSet, []*ast.CommentGroup, []*ast.Ident) {
@@ -223,15 +231,24 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor {
} }
case *ast.AssignStmt: case *ast.AssignStmt:
if n.Tok == token.QUO_ASSIGN || n.Tok == token.REM_ASSIGN {
bld.maybePanic()
}
for _, x := range n.Rhs { for _, x := range n.Rhs {
bld.walk(x) bld.walk(x)
} }
for _, x := range n.Lhs { for i, x := range n.Lhs {
if id, ok := ident(x); ok { if id, ok := ident(x); ok {
if n.Tok >= token.ADD_ASSIGN && n.Tok <= token.AND_NOT_ASSIGN { if n.Tok >= token.ADD_ASSIGN && n.Tok <= token.AND_NOT_ASSIGN {
bld.use(id) bld.use(id)
} }
// Don't treat explicit initialization to zero as assignment; it is often used as shorthand for a bare declaration.
if n.Tok == token.DEFINE && i < len(n.Rhs) && isZeroLiteral(n.Rhs[i]) {
bld.use(id)
} else {
bld.assign(id) bld.assign(id)
}
} else { } else {
bld.walk(x) bld.walk(x)
} }
@@ -277,7 +294,21 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor {
bld.use(id) bld.use(id)
} }
} }
case *ast.SendStmt:
bld.maybePanic()
return bld
case *ast.BinaryExpr:
if n.Op == token.EQL || n.Op == token.QUO || n.Op == token.REM {
bld.maybePanic()
}
return bld
case *ast.CallExpr:
bld.maybePanic()
return bld
case *ast.IndexExpr:
bld.maybePanic()
return bld
case *ast.UnaryExpr: case *ast.UnaryExpr:
id, ok := ident(n.X) id, ok := ident(n.X)
if ix, isIx := n.X.(*ast.IndexExpr); isIx { if ix, isIx := n.X.(*ast.IndexExpr); isIx {
@@ -291,6 +322,7 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor {
} }
return bld return bld
case *ast.SelectorExpr: case *ast.SelectorExpr:
bld.maybePanic()
// A method call (possibly delayed via a method value) might implicitly take // A method call (possibly delayed via a method value) might implicitly take
// the address of its receiver, causing it to escape. // the address of its receiver, causing it to escape.
// We can't do any better here without knowing the variable's type. // We can't do any better here without knowing the variable's type.
@@ -301,6 +333,7 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor {
} }
return bld return bld
case *ast.SliceExpr: case *ast.SliceExpr:
bld.maybePanic()
// We don't care about slicing into slices, but without type information we can do no better. // We don't care about slicing into slices, but without type information we can do no better.
if id, ok := ident(n.X); ok { if id, ok := ident(n.X); ok {
if v, ok := bld.vars[id.Obj]; ok { if v, ok := bld.vars[id.Obj]; ok {
@@ -308,6 +341,12 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor {
} }
} }
return bld return bld
case *ast.StarExpr:
bld.maybePanic()
return bld
case *ast.TypeAssertExpr:
bld.maybePanic()
return bld
default: default:
return bld return bld
@@ -315,6 +354,18 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor {
return nil return nil
} }
func isZeroLiteral(x ast.Expr) bool {
b, ok := x.(*ast.BasicLit)
if !ok {
return false
}
switch b.Value {
case "0", "0.0", "0.", ".0", `""`:
return true
}
return false
}
func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) { func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) {
for _, v := range bld.vars { for _, v := range bld.vars {
v.fundept++ v.fundept++
@@ -382,6 +433,22 @@ func (bld *builder) swtch(stmt ast.Stmt, cases []ast.Stmt) {
bld.breaks.pop() bld.breaks.pop()
} }
// An operation that might panic marks named function results as used.
func (bld *builder) maybePanic() {
if len(bld.results) == 0 {
return
}
res := bld.results[len(bld.results)-1]
if res == nil {
return
}
for _, f := range res.List {
for _, id := range f.Names {
bld.use(id)
}
}
}
func (bld *builder) newBlock(parents ...*block) *block { func (bld *builder) newBlock(parents ...*block) *block {
bld.block = &block{ops: map[*ast.Object][]operation{}} bld.block = &block{ops: map[*ast.Object][]operation{}}
for _, b := range parents { for _, b := range parents {

View File

@@ -99,7 +99,7 @@ in the importer.
However, you can use errcheck on packages that depend on those which use cgo. In However, you can use errcheck on packages that depend on those which use cgo. In
order for this to work you need to `go install` the cgo dependencies before running order for this to work you need to `go install` the cgo dependencies before running
errcheck on the dependant packages. errcheck on the dependent packages.
See https://github.com/kisielk/errcheck/issues/16 for more details. See https://github.com/kisielk/errcheck/issues/16 for more details.

View File

@@ -40,7 +40,7 @@ Enable Travis-CI on your github repository settings.
For a **public** github repository put below's `.travis.yml`. For a **public** github repository put below's `.travis.yml`.
``` ```yml
language: go language: go
sudo: false sudo: false
go: go:
@@ -48,14 +48,14 @@ go:
before_install: before_install:
- go get github.com/mattn/goveralls - go get github.com/mattn/goveralls
script: script:
- $HOME/gopath/bin/goveralls -service=travis-ci - $GOPATH/bin/goveralls -service=travis-ci
``` ```
For a **public** github repository, it is not necessary to define your repository key (`COVERALLS_TOKEN`). For a **public** github repository, it is not necessary to define your repository key (`COVERALLS_TOKEN`).
For a **private** github repository put below's `.travis.yml`. If you use **travis pro**, you need to specify `-service=travis-pro` instead of `-service=travis-ci`. For a **private** github repository put below's `.travis.yml`. If you use **travis pro**, you need to specify `-service=travis-pro` instead of `-service=travis-ci`.
``` ```yml
language: go language: go
sudo: false sudo: false
go: go:
@@ -63,7 +63,7 @@ go:
before_install: before_install:
- go get github.com/mattn/goveralls - go get github.com/mattn/goveralls
script: script:
- $HOME/gopath/bin/goveralls -service=travis-pro - $GOPATH/bin/goveralls -service=travis-pro
``` ```
Store your Coveralls API token in `Environment variables`. Store your Coveralls API token in `Environment variables`.
@@ -81,7 +81,7 @@ $ travis encrypt COVERALLS_TOKEN=your_token_goes_here --add env.global
travis will add `env` block as following example: travis will add `env` block as following example:
``` ```yml
env: env:
global: global:
secure: xxxxxxxxxxxxx secure: xxxxxxxxxxxxx

View File

@@ -48,14 +48,13 @@ func mergeProfs(pfss [][]*cover.Profile) []*cover.Profile {
ret := make([]*cover.Profile, 0, len(head)) ret := make([]*cover.Profile, 0, len(head))
for i, profile := range head { for i, profile := range head {
for _, ps := range rest { for _, ps := range rest {
// find profiles
if len(ps) == 0 { if len(ps) == 0 {
// no test files
continue continue
} else if len(ps) < i+1 { } else if len(ps) < i+1 {
log.Fatal("Profile length is different") continue
} } else if ps[i].FileName != profile.FileName {
if ps[i].FileName != profile.FileName { continue
log.Fatal("Profile FileName is different")
} }
profile.Blocks = mergeProfBlocks(profile.Blocks, ps[i].Blocks) profile.Blocks = mergeProfBlocks(profile.Blocks, ps[i].Blocks)
} }

View File

@@ -45,6 +45,7 @@ var (
extraFlags Flags extraFlags Flags
pkg = flag.String("package", "", "Go package") pkg = flag.String("package", "", "Go package")
verbose = flag.Bool("v", false, "Pass '-v' argument to 'go test' and output to stdout") verbose = flag.Bool("v", false, "Pass '-v' argument to 'go test' and output to stdout")
race = flag.Bool("race", false, "Pass '-race' argument to 'go test'")
debug = flag.Bool("debug", false, "Enable debug output") debug = flag.Bool("debug", false, "Enable debug output")
coverprof = flag.String("coverprofile", "", "If supplied, use a go cover profile (comma separated)") coverprof = flag.String("coverprofile", "", "If supplied, use a go cover profile (comma separated)")
covermode = flag.String("covermode", "count", "sent as covermode argument to go test") covermode = flag.String("covermode", "count", "sent as covermode argument to go test")
@@ -132,12 +133,18 @@ func getCoverage() ([]*SourceFile, error) {
outBuf := new(bytes.Buffer) outBuf := new(bytes.Buffer)
cmd.Stdout = outBuf cmd.Stdout = outBuf
cmd.Stderr = outBuf cmd.Stderr = outBuf
coverm := *covermode
args := []string{"go", "test", "-covermode", *covermode, "-coverprofile", f.Name(), coverpkg} if *race {
coverm = "atomic"
}
args := []string{"go", "test", "-covermode", coverm, "-coverprofile", f.Name(), coverpkg}
if *verbose { if *verbose {
args = append(args, "-v") args = append(args, "-v")
cmd.Stdout = os.Stdout cmd.Stdout = os.Stdout
} }
if *race {
args = append(args, "-race")
}
args = append(args, extraFlags...) args = append(args, extraFlags...)
args = append(args, line) args = append(args, line)
cmd.Args = args cmd.Args = args

27
tools/vendor/github.com/mdempsky/maligned/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,27 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

7
tools/vendor/github.com/mdempsky/maligned/README generated vendored Normal file
View File

@@ -0,0 +1,7 @@
Install:
go get github.com/mdempsky/maligned
Usage:
maligned cmd/compile/internal/gc cmd/link/internal/ld

229
tools/vendor/github.com/mdempsky/maligned/maligned.go generated vendored Normal file
View File

@@ -0,0 +1,229 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"flag"
"fmt"
"go/ast"
"go/build"
"go/token"
"go/types"
"log"
"sort"
"github.com/kisielk/gotool"
"golang.org/x/tools/go/loader"
)
// fset is the shared FileSet used for position reporting by malign.
var fset = token.NewFileSet()

// main type-checks the packages named on the command line and reports
// every struct whose fields could be reordered to occupy less space.
func main() {
flag.Parse()
// Expand "..." patterns and relative paths into concrete import paths.
importPaths := gotool.ImportPaths(flag.Args())
if len(importPaths) == 0 {
return
}
var conf loader.Config
conf.Fset = fset
for _, importPath := range importPaths {
conf.Import(importPath)
}
prog, err := conf.Load()
if err != nil {
log.Fatal(err)
}
// Inspect every struct literal type in the requested packages only
// (dependencies are loaded but not checked).
for _, pkg := range prog.InitialPackages() {
for _, file := range pkg.Files {
ast.Inspect(file, func(node ast.Node) bool {
if s, ok := node.(*ast.StructType); ok {
malign(node.Pos(), pkg.Types[s].Type.(*types.Struct))
}
return true
})
}
}
}
// malign prints a diagnostic at pos if str's current size differs from
// the size achievable by optimally reordering its fields.
func malign(pos token.Pos, str *types.Struct) {
// Default to 64-bit word size and alignment; adjust for the
// architectures with different layouts.
wordSize := int64(8)
maxAlign := int64(8)
switch build.Default.GOARCH {
case "386", "arm":
wordSize, maxAlign = 4, 4
case "amd64p32":
wordSize = 4
}
s := gcSizes{wordSize, maxAlign}
sz, opt := s.Sizeof(str), optimalSize(str, &s)
if sz != opt {
fmt.Printf("%s: struct of size %d could be %d\n", fset.Position(pos), sz, opt)
}
}
// optimalSize returns the size str would have if its fields were sorted
// by byAlignAndSize (zero-size first, then by decreasing alignment and
// size), which minimizes padding.
func optimalSize(str *types.Struct, sizes *gcSizes) int64 {
nf := str.NumFields()
fields := make([]*types.Var, nf)
alignofs := make([]int64, nf)
sizeofs := make([]int64, nf)
for i := 0; i < nf; i++ {
fields[i] = str.Field(i)
ft := fields[i].Type()
alignofs[i] = sizes.Alignof(ft)
sizeofs[i] = sizes.Sizeof(ft)
}
sort.Sort(&byAlignAndSize{fields, alignofs, sizeofs})
// Re-measure a synthetic struct with the reordered fields (no tags).
return sizes.Sizeof(types.NewStruct(fields, nil))
}
type byAlignAndSize struct {
fields []*types.Var
alignofs []int64
sizeofs []int64
}
func (s *byAlignAndSize) Len() int { return len(s.fields) }
func (s *byAlignAndSize) Swap(i, j int) {
s.fields[i], s.fields[j] = s.fields[j], s.fields[i]
s.alignofs[i], s.alignofs[j] = s.alignofs[j], s.alignofs[i]
s.sizeofs[i], s.sizeofs[j] = s.sizeofs[j], s.sizeofs[i]
}
func (s *byAlignAndSize) Less(i, j int) bool {
// Place zero sized objects before non-zero sized objects.
if s.sizeofs[i] == 0 && s.sizeofs[j] != 0 {
return true
}
if s.sizeofs[j] == 0 && s.sizeofs[i] != 0 {
return false
}
// Next, place more tightly aligned objects before less tightly aligned objects.
if s.alignofs[i] != s.alignofs[j] {
return s.alignofs[i] > s.alignofs[j]
}
// Lastly, order by size.
if s.sizeofs[i] != s.sizeofs[j] {
return s.sizeofs[i] > s.sizeofs[j]
}
return false
}
// Code below based on go/types.StdSizes.

// gcSizes computes sizes and alignments the way the gc toolchain does,
// parameterized by the target's word size and maximum alignment.
type gcSizes struct {
WordSize int64
MaxAlign int64
}

// Alignof returns the alignment in bytes of a value of type T.
func (s *gcSizes) Alignof(T types.Type) int64 {
// NOTE: On amd64, complex64 is 8 byte aligned,
// even though float32 is only 4 byte aligned.
// For arrays and structs, alignment is defined in terms
// of alignment of the elements and fields, respectively.
switch t := T.Underlying().(type) {
case *types.Array:
// spec: "For a variable x of array type: unsafe.Alignof(x)
// is the same as unsafe.Alignof(x[0]), but at least 1."
return s.Alignof(t.Elem())
case *types.Struct:
// spec: "For a variable x of struct type: unsafe.Alignof(x)
// is the largest of the values unsafe.Alignof(x.f) for each
// field f of x, but at least 1."
max := int64(1)
for i, nf := 0, t.NumFields(); i < nf; i++ {
if a := s.Alignof(t.Field(i).Type()); a > max {
max = a
}
}
return max
}
a := s.Sizeof(T) // may be 0
// spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1."
if a < 1 {
return 1
}
// Alignment never exceeds the target's maximum.
if a > s.MaxAlign {
return s.MaxAlign
}
return a
}

// basicSizes maps basic-type kinds to their fixed sizes; entries of 0
// (e.g. String) are handled specially in Sizeof.
var basicSizes = [...]byte{
types.Bool: 1,
types.Int8: 1,
types.Int16: 2,
types.Int32: 4,
types.Int64: 8,
types.Uint8: 1,
types.Uint16: 2,
types.Uint32: 4,
types.Uint64: 8,
types.Float32: 4,
types.Float64: 8,
types.Complex64: 8,
types.Complex128: 16,
}

// Sizeof returns the size in bytes of a value of type T, including any
// padding between struct fields and at the end of a struct.
func (s *gcSizes) Sizeof(T types.Type) int64 {
switch t := T.Underlying().(type) {
case *types.Basic:
k := t.Kind()
if int(k) < len(basicSizes) {
if s := basicSizes[k]; s > 0 {
return int64(s)
}
}
// A string is a (pointer, length) pair.
if k == types.String {
return s.WordSize * 2
}
case *types.Array:
n := t.Len()
if n == 0 {
return 0
}
// n-1 aligned elements plus the final (possibly unpadded) one.
a := s.Alignof(t.Elem())
z := s.Sizeof(t.Elem())
return align(z, a)*(n-1) + z
case *types.Slice:
// A slice is a (pointer, length, capacity) triple.
return s.WordSize * 3
case *types.Struct:
nf := t.NumFields()
if nf == 0 {
return 0
}
var o int64
max := int64(1)
for i := 0; i < nf; i++ {
ft := t.Field(i).Type()
a, sz := s.Alignof(ft), s.Sizeof(ft)
if a > max {
max = a
}
// A trailing zero-sized field occupies one byte so that taking
// its address does not point past the struct.
if i == nf-1 && sz == 0 && o != 0 {
sz = 1
}
o = align(o, a) + sz
}
// Pad the struct to its own alignment.
return align(o, max)
case *types.Interface:
// An interface is a (type, data) pair of words.
return s.WordSize * 2
}
return s.WordSize // catch-all
}
// align returns the smallest y >= x such that y % a == 0, i.e. x rounded
// up to the next multiple of the (positive) alignment a.
func align(x, a int64) int64 {
	rounded := x + a - 1
	return rounded - rounded%a
}

View File

@@ -12,7 +12,7 @@ import (
"strings" "strings"
"github.com/mibk/dupl/job" "github.com/mibk/dupl/job"
"github.com/mibk/dupl/output" "github.com/mibk/dupl/printer"
"github.com/mibk/dupl/syntax" "github.com/mibk/dupl/syntax"
) )
@@ -20,18 +20,18 @@ const defaultThreshold = 15
var ( var (
paths = []string{"."} paths = []string{"."}
vendor = flag.Bool("vendor", false, "check files in vendor directory") vendor = flag.Bool("vendor", false, "")
verbose = flag.Bool("verbose", false, "explain what is being done") verbose = flag.Bool("verbose", false, "")
threshold = flag.Int("threshold", defaultThreshold, "minimum token sequence as a clone") threshold = flag.Int("threshold", defaultThreshold, "")
files = flag.Bool("files", false, "files names from stdin") files = flag.Bool("files", false, "")
html = flag.Bool("html", false, "html output") html = flag.Bool("html", false, "")
plumbing = flag.Bool("plumbing", false, "plumbing output for consumption by scripts or tools") plumbing = flag.Bool("plumbing", false, "")
) )
const ( const (
vendorDirPrefix = "vendor" + string(filepath.Separator) vendorDirPrefix = "vendor" + string(filepath.Separator)
vendorDirInPath = string(filepath.Separator) + "vendor" + string(filepath.Separator) vendorDirInPath = string(filepath.Separator) + vendorDirPrefix
) )
func init() { func init() {
@@ -39,43 +39,6 @@ func init() {
flag.IntVar(threshold, "t", defaultThreshold, "alias for -threshold") flag.IntVar(threshold, "t", defaultThreshold, "alias for -threshold")
} }
func usage() {
fmt.Fprintln(os.Stderr, `Usage of dupl:
dupl [flags] [paths]
Paths:
If the given path is a file, dupl will use it regardless of
the file extension. If it is a directory it will recursively
search for *.go files in that directory.
If no path is given dupl will recursively search for *.go
files in the current directory.
Flags:
-files
read file names from stdin one at each line
-html
output the results as HTML, including duplicate code fragments
-plumbing
plumbing (easy-to-parse) output for consumption by scripts or tools
-t, -threshold size
minimum token sequence size as a clone (default 15)
-vendor
check files in vendor directory
-v, -verbose
explain what is being done
Examples:
dupl -t 100
Search clones in the current directory of size at least
100 tokens.
dupl $(find app/ -name '*_test.go')
Search for clones in tests in the app directory.
find app/ -name '*_test.go' |dupl -files
The same as above.`)
os.Exit(2)
}
func main() { func main() {
flag.Usage = usage flag.Usage = usage
flag.Parse() flag.Parse()
@@ -110,7 +73,17 @@ func main() {
} }
close(duplChan) close(duplChan)
}() }()
printDupls(duplChan)
newPrinter := printer.NewText
if *html {
newPrinter = printer.NewHTML
} else if *plumbing {
newPrinter = printer.NewPlumbing
}
p := newPrinter(os.Stdout, ioutil.ReadFile)
if err := printDupls(p, duplChan); err != nil {
log.Fatal(err)
}
} }
func filesFeed() chan string { func filesFeed() chan string {
@@ -120,10 +93,7 @@ func filesFeed() chan string {
s := bufio.NewScanner(os.Stdin) s := bufio.NewScanner(os.Stdin)
for s.Scan() { for s.Scan() {
f := s.Text() f := s.Text()
if strings.HasPrefix(f, "./") { fchan <- strings.TrimPrefix(f, "./")
f = f[2:]
}
fchan <- f
} }
close(fchan) close(fchan)
}() }()
@@ -160,7 +130,7 @@ func crawlPaths(paths []string) chan string {
return fchan return fchan
} }
func printDupls(duplChan <-chan syntax.Match) { func printDupls(p printer.Printer, duplChan <-chan syntax.Match) error {
groups := make(map[string][][]*syntax.Node) groups := make(map[string][][]*syntax.Node)
for dupl := range duplChan { for dupl := range duplChan {
groups[dupl.Hash] = append(groups[dupl.Hash], dupl.Frags...) groups[dupl.Hash] = append(groups[dupl.Hash], dupl.Frags...)
@@ -171,32 +141,18 @@ func printDupls(duplChan <-chan syntax.Match) {
} }
sort.Strings(keys) sort.Strings(keys)
p := getPrinter() if err := p.PrintHeader(); err != nil {
return err
}
for _, k := range keys { for _, k := range keys {
uniq := unique(groups[k]) uniq := unique(groups[k])
if len(uniq) > 1 { if len(uniq) > 1 {
if err := p.Print(uniq); err != nil { if err := p.PrintClones(uniq); err != nil {
log.Fatal(err) return err
} }
} }
} }
p.Finish() return p.PrintFooter()
}
func getPrinter() output.Printer {
var fr fileReader
if *html {
return output.NewHTMLPrinter(os.Stdout, fr)
} else if *plumbing {
return output.NewPlumbingPrinter(os.Stdout, fr)
}
return output.NewTextPrinter(os.Stdout, fr)
}
type fileReader struct{}
func (fileReader) ReadFile(filename string) ([]byte, error) {
return ioutil.ReadFile(filename)
} }
func unique(group [][]*syntax.Node) [][]*syntax.Node { func unique(group [][]*syntax.Node) [][]*syntax.Node {
@@ -217,3 +173,39 @@ func unique(group [][]*syntax.Node) [][]*syntax.Node {
} }
return newGroup return newGroup
} }
func usage() {
fmt.Fprintln(os.Stderr, `Usage: dupl [flags] [paths]
Paths:
If the given path is a file, dupl will use it regardless of
the file extension. If it is a directory, it will recursively
search for *.go files in that directory.
If no path is given, dupl will recursively search for *.go
files in the current directory.
Flags:
-files
read file names from stdin one at each line
-html
output the results as HTML, including duplicate code fragments
-plumbing
plumbing (easy-to-parse) output for consumption by scripts or tools
-t, -threshold size
minimum token sequence size as a clone (default 15)
-vendor
check files in vendor directory
-v, -verbose
explain what is being done
Examples:
dupl -t 100
Search clones in the current directory of size at least
100 tokens.
dupl $(find app/ -name '*_test.go')
Search for clones in tests in the app directory.
find app/ -name '*_test.go' |dupl -files
The same as above.`)
os.Exit(2)
}

120
tools/vendor/github.com/mibk/dupl/printer/html.go generated vendored Normal file
View File

@@ -0,0 +1,120 @@
package printer
import (
"bytes"
"fmt"
"io"
"regexp"
"sort"
"github.com/mibk/dupl/syntax"
)
// html renders duplicate groups as a standalone HTML page with the
// offending code fragments inlined.
type html struct {
// iota numbers the clone groups as they are printed (not Go's iota).
iota int
w io.Writer
ReadFile
}

// NewHTML returns a Printer that writes HTML to w, reading source files
// through fread.
func NewHTML(w io.Writer, fread ReadFile) Printer {
return &html{w: w, ReadFile: fread}
}

// PrintHeader emits the fixed HTML preamble (doctype, title, styles).
func (p *html) PrintHeader() error {
_, err := fmt.Fprint(p.w, `<!DOCTYPE html>
<meta charset="utf-8"/>
<title>Duplicates</title>
<style>
pre {
background-color: #FFD;
border: 1px solid #E2E2E2;
padding: 1ex;
}
</style>
`)
return err
}
// PrintClones writes one numbered section per clone group, showing each
// duplicate's location and its de-indented source fragment.
func (p *html) PrintClones(dups [][]*syntax.Node) error {
p.iota++
fmt.Fprintf(p.w, "<h1>#%d found %d clones</h1>\n", p.iota, len(dups))
clones := make([]clone, len(dups))
for i, dup := range dups {
cnt := len(dup)
if cnt == 0 {
panic("zero length dup")
}
// The fragment spans from the first node's start to the last node's end.
nstart := dup[0]
nend := dup[cnt-1]
file, err := p.ReadFile(nstart.Filename)
if err != nil {
return err
}
lineStart, _ := blockLines(file, nstart.Pos, nend.End)
cl := clone{filename: nstart.Filename, lineStart: lineStart}
// Pad the first line with whitespace so the fragment keeps its
// original column position, then strip common indentation.
start := findLineBeg(file, nstart.Pos)
content := append(toWhitespace(file[start:nstart.Pos]), file[nstart.Pos:nend.End]...)
cl.fragment = deindent(content)
clones[i] = cl
}
sort.Sort(byNameAndLine(clones))
for _, cl := range clones {
fmt.Fprintf(p.w, "<h2>%s:%d</h2>\n<pre>%s</pre>\n", cl.filename, cl.lineStart, cl.fragment)
}
return nil
}

// PrintFooter is a no-op: the HTML page needs no closing boilerplate.
func (*html) PrintFooter() error { return nil }
// findLineBeg returns the offset of the first byte of the line that
// contains (or, if file[index] is a newline, ends at) index. Scans
// backwards from index inclusive; returns 0 when no newline precedes it.
func findLineBeg(file []byte, index int) int {
	if i := bytes.LastIndexByte(file[:index+1], '\n'); i >= 0 {
		return i + 1
	}
	return 0
}
// toWhitespace maps every rune of str to a single space, preserving tabs,
// producing padding that occupies the same columns as the original text.
func toWhitespace(str []byte) []byte {
	var out []byte
	for _, r := range string(str) {
		if r == '\t' {
			out = append(out, '\t')
		} else {
			out = append(out, ' ')
		}
	}
	return out
}
// deindent strips the longest common leading-tab prefix from every line
// of block, so HTML fragments start at column zero. Only tab indentation
// is recognized; space-indented code is left unchanged.
func deindent(block []byte) []byte {
const maxVal = 99
min := maxVal
// Measure the tab indent of every non-blank line (anchored at the
// start of the block or just after a newline).
re := regexp.MustCompile(`(^|\n)(\t*)\S`)
for _, line := range re.FindAllSubmatch(block, -1) {
indent := line[2]
if len(indent) < min {
min = len(indent)
}
}
// Nothing to strip, or no indented line was found at all.
if min == 0 || min == maxVal {
return block
}
// Drop the common indent from the first line...
block = block[min:]
Loop:
// ...then after each interior newline, provided the next min bytes
// really are tabs (in-place splice; lines with less indent are kept).
for i := 0; i < len(block); i++ {
if block[i] == '\n' && i != len(block)-1 {
for j := 0; j < min; j++ {
if block[i+j+1] != '\t' {
continue Loop
}
}
block = append(block[:i+1], block[i+1+min:]...)
}
}
return block
}

36
tools/vendor/github.com/mibk/dupl/printer/plumbing.go generated vendored Normal file
View File

@@ -0,0 +1,36 @@
package printer
import (
"fmt"
"io"
"sort"
"github.com/mibk/dupl/syntax"
)
// plumbing emits an easy-to-parse, line-oriented report for consumption
// by scripts and tools.
type plumbing struct {
w io.Writer
ReadFile
}

// NewPlumbing returns a Printer writing plumbing output to w, reading
// source files through fread.
func NewPlumbing(w io.Writer, fread ReadFile) Printer {
return &plumbing{w, fread}
}

// PrintHeader is a no-op for plumbing output.
func (p *plumbing) PrintHeader() error { return nil }

// PrintClones prints one line per clone, pairing each with the next one
// in name/line order (the last wraps around to the first).
func (p *plumbing) PrintClones(dups [][]*syntax.Node) error {
clones, err := prepareClonesInfo(p.ReadFile, dups)
if err != nil {
return err
}
sort.Sort(byNameAndLine(clones))
for i, cl := range clones {
// NOTE(review): assumes clones is non-empty (callers pass groups
// with at least two fragments); modulo by zero otherwise.
nextCl := clones[(i+1)%len(clones)]
fmt.Fprintf(p.w, "%s:%d-%d: duplicate of %s:%d-%d\n", cl.filename, cl.lineStart, cl.lineEnd,
nextCl.filename, nextCl.lineStart, nextCl.lineEnd)
}
return nil
}

// PrintFooter is a no-op for plumbing output.
func (p *plumbing) PrintFooter() error { return nil }

11
tools/vendor/github.com/mibk/dupl/printer/printer.go generated vendored Normal file
View File

@@ -0,0 +1,11 @@
package printer
import "github.com/mibk/dupl/syntax"
// ReadFile reads and returns the contents of the named source file;
// injected so printers can be tested without touching the filesystem.
type ReadFile func(filename string) ([]byte, error)

// Printer renders duplicate-code reports: a header once, then one
// PrintClones call per clone group, then a footer.
type Printer interface {
PrintHeader() error
PrintClones(dups [][]*syntax.Node) error
PrintFooter() error
}

100
tools/vendor/github.com/mibk/dupl/printer/text.go generated vendored Normal file
View File

@@ -0,0 +1,100 @@
package printer
import (
"fmt"
"io"
"sort"
"github.com/mibk/dupl/syntax"
)
// text is the default human-readable Printer.
type text struct {
// cnt counts clone groups printed so far, for the footer summary.
cnt int
w io.Writer
ReadFile
}

// NewText returns a Printer writing plain text to w, reading source
// files through fread.
func NewText(w io.Writer, fread ReadFile) Printer {
return &text{w: w, ReadFile: fread}
}

// PrintHeader is a no-op for text output.
func (p *text) PrintHeader() error { return nil }

// PrintClones lists each clone group's locations, sorted by file name
// and starting line.
func (p *text) PrintClones(dups [][]*syntax.Node) error {
p.cnt++
fmt.Fprintf(p.w, "found %d clones:\n", len(dups))
clones, err := prepareClonesInfo(p.ReadFile, dups)
if err != nil {
return err
}
sort.Sort(byNameAndLine(clones))
for _, cl := range clones {
fmt.Fprintf(p.w, " %s:%d,%d\n", cl.filename, cl.lineStart, cl.lineEnd)
}
return nil
}

// PrintFooter prints the total number of clone groups reported.
func (p *text) PrintFooter() error {
_, err := fmt.Fprintf(p.w, "\nFound total %d clone groups.\n", p.cnt)
return err
}
// prepareClonesInfo resolves each duplicate fragment to its file name and
// line range by reading the source through fread. Panics on an empty
// fragment (invariant violation upstream).
func prepareClonesInfo(fread ReadFile, dups [][]*syntax.Node) ([]clone, error) {
clones := make([]clone, len(dups))
for i, dup := range dups {
cnt := len(dup)
if cnt == 0 {
panic("zero length dup")
}
// The fragment spans from the first node's start to the last node's end.
nstart := dup[0]
nend := dup[cnt-1]
file, err := fread(nstart.Filename)
if err != nil {
return nil, err
}
cl := clone{filename: nstart.Filename}
cl.lineStart, cl.lineEnd = blockLines(file, nstart.Pos, nend.End)
clones[i] = cl
}
return clones, nil
}
// blockLines converts the byte range [from, to) of file into 1-based
// (start, end) line numbers. A newline at an offset is counted toward
// the line number before the offset checks run, matching dupl's
// original accounting.
func blockLines(file []byte, from, to int) (int, int) {
	start, end := 0, 0
	cur := 1
	for off, c := range file {
		if c == '\n' {
			cur++
		}
		if off == from {
			start = cur
		}
		if off == to-1 {
			end = cur
			break
		}
	}
	return start, end
}
// clone records where one duplicate fragment lives, plus (for the HTML
// printer) its extracted source text.
type clone struct {
	filename  string
	lineStart int
	lineEnd   int
	fragment  []byte
}

// byNameAndLine orders clones by file name, then by starting line.
type byNameAndLine []clone

func (c byNameAndLine) Len() int      { return len(c) }
func (c byNameAndLine) Swap(i, j int) { c[i], c[j] = c[j], c[i] }

func (c byNameAndLine) Less(i, j int) bool {
	if c[i].filename != c[j].filename {
		return c[i].filename < c[j].filename
	}
	return c[i].lineStart < c[j].lineStart
}

View File

@@ -12,7 +12,6 @@ import (
"go/parser" "go/parser"
"go/token" "go/token"
"go/types" "go/types"
"io"
"os" "os"
"path/filepath" "path/filepath"
"regexp" "regexp"
@@ -29,19 +28,15 @@ import (
"github.com/mvdan/lint" "github.com/mvdan/lint"
) )
func UnusedParams(tests, debug bool, args ...string) ([]string, error) { func UnusedParams(tests bool, args ...string) ([]string, error) {
wd, err := os.Getwd() wd, err := os.Getwd()
if err != nil { if err != nil {
return nil, err return nil, err
} }
c := &Checker{ c := &Checker{
wd: wd, wd: wd, tests: tests,
tests: tests,
cachedDeclCounts: make(map[string]map[string]int), cachedDeclCounts: make(map[string]map[string]int),
} }
if debug {
c.debugLog = os.Stderr
}
return c.lines(args...) return c.lines(args...)
} }
@@ -52,7 +47,6 @@ type Checker struct {
wd string wd string
tests bool tests bool
debugLog io.Writer
cachedDeclCounts map[string]map[string]int cachedDeclCounts map[string]map[string]int
} }
@@ -107,12 +101,6 @@ func (c *Checker) ProgramSSA(prog *ssa.Program) {
c.prog = prog c.prog = prog
} }
func (c *Checker) debug(format string, a ...interface{}) {
if c.debugLog != nil {
fmt.Fprintf(c.debugLog, format, a...)
}
}
func (c *Checker) Check() ([]lint.Issue, error) { func (c *Checker) Check() ([]lint.Issue, error) {
wantPkg := make(map[*types.Package]*loader.PackageInfo) wantPkg := make(map[*types.Package]*loader.PackageInfo)
for _, info := range c.lprog.InitialPackages() { for _, info := range c.lprog.InitialPackages() {
@@ -133,9 +121,7 @@ funcLoop:
if info == nil { // not part of given pkgs if info == nil { // not part of given pkgs
continue continue
} }
c.debug("func %s\n", fn.String())
if dummyImpl(fn.Blocks[0]) { // panic implementation if dummyImpl(fn.Blocks[0]) { // panic implementation
c.debug(" skip - dummy implementation\n")
continue continue
} }
for _, edge := range cg.Nodes[fn].In { for _, edge := range cg.Nodes[fn].In {
@@ -144,29 +130,24 @@ funcLoop:
default: default:
// called via a parameter or field, type // called via a parameter or field, type
// is set in stone. // is set in stone.
c.debug(" skip - type is required via call\n")
continue funcLoop continue funcLoop
} }
} }
if c.multipleImpls(info, fn) { if c.multipleImpls(info, fn) {
c.debug(" skip - multiple implementations via build tags\n")
continue continue
} }
for i, par := range fn.Params { for i, par := range fn.Params {
if i == 0 && fn.Signature.Recv() != nil { // receiver if i == 0 && fn.Signature.Recv() != nil { // receiver
continue continue
} }
c.debug("%s\n", par.String())
switch par.Object().Name() { switch par.Object().Name() {
case "", "_": // unnamed case "", "_": // unnamed
c.debug(" skip - unnamed\n")
continue continue
} }
reason := "is unused" reason := "is unused"
if cv := receivesSameValue(cg.Nodes[fn].In, par, i); cv != nil { if cv := receivesSameValue(cg.Nodes[fn].In, par, i); cv != nil {
reason = fmt.Sprintf("always receives %v", cv) reason = fmt.Sprintf("always receives %v", cv)
} else if anyRealUse(par, i) { } else if anyRealUse(par, i) {
c.debug(" skip - used somewhere in the func body\n")
continue continue
} }
issues = append(issues, Issue{ issues = append(issues, Issue{
@@ -177,25 +158,15 @@ funcLoop:
} }
// TODO: replace by sort.Slice once we drop Go 1.7 support // TODO: replace by sort.Slice once we drop Go 1.7 support
sort.Sort(byNamePos{c.prog.Fset, issues}) sort.Sort(byPos(issues))
return issues, nil return issues, nil
} }
type byNamePos struct { type byPos []lint.Issue
fset *token.FileSet
l []lint.Issue
}
func (p byNamePos) Len() int { return len(p.l) } func (p byPos) Len() int { return len(p) }
func (p byNamePos) Swap(i, j int) { p.l[i], p.l[j] = p.l[j], p.l[i] } func (p byPos) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p byNamePos) Less(i, j int) bool { func (p byPos) Less(i, j int) bool { return p[i].Pos() < p[j].Pos() }
p1 := p.fset.Position(p.l[i].Pos())
p2 := p.fset.Position(p.l[j].Pos())
if p1.Filename == p2.Filename {
return p1.Offset < p2.Offset
}
return p1.Filename < p2.Filename
}
func receivesSameValue(in []*callgraph.Edge, par *ssa.Parameter, pos int) constant.Value { func receivesSameValue(in []*callgraph.Edge, par *ssa.Parameter, pos int) constant.Value {
if ast.IsExported(par.Parent().Name()) { if ast.IsExported(par.Parent().Name()) {
@@ -221,12 +192,14 @@ func receivesSameValue(in []*callgraph.Edge, par *ssa.Parameter, pos int) consta
func anyRealUse(par *ssa.Parameter, pos int) bool { func anyRealUse(par *ssa.Parameter, pos int) bool {
refLoop: refLoop:
for _, ref := range *par.Referrers() { for _, ref := range *par.Referrers() {
switch x := ref.(type) { call, ok := ref.(*ssa.Call)
case *ssa.Call: if !ok {
if x.Call.Value != par.Parent() { return true
}
if call.Call.Value != par.Parent() {
return true // not a recursive call return true // not a recursive call
} }
for i, arg := range x.Call.Args { for i, arg := range call.Call.Args {
if arg != par { if arg != par {
continue continue
} }
@@ -236,32 +209,10 @@ refLoop:
} }
} }
return true return true
case *ssa.Store:
if insertedStore(x) {
continue // inserted by go/ssa, not from the code
}
return true
default:
return true
}
} }
return false return false
} }
func insertedStore(instr ssa.Instruction) bool {
if instr.Pos() != token.NoPos {
return false
}
store, ok := instr.(*ssa.Store)
if !ok {
return false
}
alloc, ok := store.Addr.(*ssa.Alloc)
// we want exactly one use of this alloc value for it to be
// inserted by ssa and dummy - the alloc instruction itself.
return ok && len(*alloc.Referrers()) == 1
}
var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`) var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`)
// dummyImpl reports whether a block is a dummy implementation. This is // dummyImpl reports whether a block is a dummy implementation. This is
@@ -270,15 +221,11 @@ var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`)
func dummyImpl(blk *ssa.BasicBlock) bool { func dummyImpl(blk *ssa.BasicBlock) bool {
var ops [8]*ssa.Value var ops [8]*ssa.Value
for _, instr := range blk.Instrs { for _, instr := range blk.Instrs {
if insertedStore(instr) {
continue // inserted by go/ssa, not from the code
}
for _, val := range instr.Operands(ops[:0]) { for _, val := range instr.Operands(ops[:0]) {
switch x := (*val).(type) { switch x := (*val).(type) {
case nil, *ssa.Const, *ssa.ChangeType, *ssa.Alloc, case nil, *ssa.Const, *ssa.ChangeType, *ssa.Alloc,
*ssa.MakeInterface, *ssa.Function, *ssa.MakeInterface, *ssa.Function,
*ssa.Global, *ssa.IndexAddr, *ssa.Slice, *ssa.Global, *ssa.IndexAddr, *ssa.Slice:
*ssa.UnOp:
case *ssa.Call: case *ssa.Call:
if rxHarmlessCall.MatchString(x.Call.Value.String()) { if rxHarmlessCall.MatchString(x.Call.Value.String()) {
continue continue

View File

@@ -11,18 +11,11 @@ import (
"github.com/mvdan/unparam/check" "github.com/mvdan/unparam/check"
) )
var ( var tests = flag.Bool("tests", true, "include tests")
tests = flag.Bool("tests", true, "include tests")
debug = flag.Bool("debug", false, "debug prints")
)
func main() { func main() {
flag.Usage = func() {
fmt.Fprintln(os.Stderr, "usage: unparam [flags] [package ...]")
flag.PrintDefaults()
}
flag.Parse() flag.Parse()
warns, err := check.UnusedParams(*tests, *debug, flag.Args()...) warns, err := check.UnusedParams(*tests, flag.Args()...)
if err != nil { if err != nil {
fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)

View File

@@ -9,123 +9,7 @@ Gosimple requires Go 1.6 or later.
go get honnef.co/go/tools/cmd/gosimple go get honnef.co/go/tools/cmd/gosimple
## Usage ## Documentation
Invoke `gosimple` with one or more filenames, a directory, or a package named Detailed documentation can be found on
by its import path. Gosimple uses the same [staticcheck.io](https://staticcheck.io/docs/gosimple).
[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as
the `go` command and therefore
also supports relative import paths like `./...`. Additionally the `...`
wildcard can be used as suffix on relative and absolute file paths to recurse
into them.
The output of this tool is a list of suggestions in Vim quickfix format,
which is accepted by lots of different editors.
## Purpose
Gosimple differs from golint in that gosimple focuses on simplifying
code, while golint flags common style issues. Furthermore, gosimple
always targets the latest Go version. If a new Go release adds a
simpler way of doing something, gosimple will suggest that way.
## Checks
Gosimple makes the following recommendations for avoiding unsimple
constructs:
| Check | Description | Suggestion |
|-------|-----------------------------------------------------------------------------|--------------------------------------------------------------------------|
| S1000 | `select{}` with a single case | Use a plain channel send or receive |
| S1001 | A loop copying elements of `s2` to `s1` | `copy(s1, s2)` |
| S1002 | `if b == true` | `if b` |
| S1003 | `strings.Index*(x, y) != -1` | `strings.Contains(x, y)` |
| S1004 | `bytes.Compare(x, y) == 0` | `bytes.Equal(x, y)` |
| S1005 | `for _ = range x` | `for range x` |
| S1006 | `for true {...}` | `for {...}` |
| S1007 | Using double quotes and escaping for regular expressions | Use raw strings |
| S1008 | `if <expr> { return <bool> }; return <bool>` | `return <expr>` |
| S1009 | Checking a slice against nil and also checking its length against zero | Nil slices are defined to have length zero, the nil check is redundant |
| S1010 | `s[a:len(s)]` | `s[a:]` |
| S1011 | A loop appending each element of `s2` to `s1` | `append(s1, s2...)` |
| S1012 | `time.Now().Sub(x)` | `time.Since(x)` |
| S1013 | `if err != nil { return err }; return nil` | `return err` |
| S1014 | `_ = <-x` | `<-x` |
| S1015 | Using `strconv.FormatInt` when `strconv.Atoi` would be more straightforward | |
| S1016 | Converting two struct types by manually copying each field | A type conversion: `T(v)` |
| S1017 | `if strings.HasPrefix` + string slicing | Call `strings.TrimPrefix` unconditionally |
| S1018 | A loop sliding elements in a slice to the beginning | `copy(s[:n], s[offset:])` |
| S1019 | `make(T, 0)` or `make(T, x, x)` | `make(T)` or `make(T, x)` |
| S1020 | `if _, ok := i.(T); ok && i != nil` | `if _, ok := i.(T); ok` |
| S1021 | `var x uint; x = 1` | `var x uint = 1` |
| S1022 | `x, _ = someMap[key]` | `x = someMap[key]` |
| S1023 | `break` as the final statement of a `case` clause | Go doesn't have automatic fallthrough, making final `break` redundant |
| S1024 | `t.Sub(time.Now())` | `time.Until(t)` |
| S1025 | `fmt.Sprintf("%s", x)` where `x` is already a string | `x` |
| | `fmt.Sprintf("%s", x)` where `x`'s underlying type is a string | `string(x)` |
| | `fmt.Sprintf("%s", x)` where `x` has a String method | `x.String()` |
| S1026 | Copies of strings, like `string([]byte(x))` or `"" + x` | `x` |
| S1027 | `return` as the final statement of a func body with no return values | Functions that don't return anything don't need a final return statement |
| S1028 | `errors.New(fmt.Sprintf(...))` | `fmt.Errorf(...)` |
| S1029 | `for _, r := range []rune(s)` | `for _, r := range s` |
| S1030 | `string(buf.Bytes())` or `[]byte(buf.String())` | Use the appropriate method of `bytes.Buffer` instead |
## gofmt -r
Some of these rules can be automatically applied via `gofmt -r`:
```
strings.IndexRune(a, b) > -1 -> strings.ContainsRune(a, b)
strings.IndexRune(a, b) >= 0 -> strings.ContainsRune(a, b)
strings.IndexRune(a, b) != -1 -> strings.ContainsRune(a, b)
strings.IndexRune(a, b) == -1 -> !strings.ContainsRune(a, b)
strings.IndexRune(a, b) < 0 -> !strings.ContainsRune(a, b)
strings.IndexAny(a, b) > -1 -> strings.ContainsAny(a, b)
strings.IndexAny(a, b) >= 0 -> strings.ContainsAny(a, b)
strings.IndexAny(a, b) != -1 -> strings.ContainsAny(a, b)
strings.IndexAny(a, b) == -1 -> !strings.ContainsAny(a, b)
strings.IndexAny(a, b) < 0 -> !strings.ContainsAny(a, b)
strings.Index(a, b) > -1 -> strings.Contains(a, b)
strings.Index(a, b) >= 0 -> strings.Contains(a, b)
strings.Index(a, b) != -1 -> strings.Contains(a, b)
strings.Index(a, b) == -1 -> !strings.Contains(a, b)
strings.Index(a, b) < 0 -> !strings.Contains(a, b)
bytes.Index(a, b) > -1 -> bytes.Contains(a, b)
bytes.Index(a, b) >= 0 -> bytes.Contains(a, b)
bytes.Index(a, b) != -1 -> bytes.Contains(a, b)
bytes.Index(a, b) == -1 -> !bytes.Contains(a, b)
bytes.Index(a, b) < 0 -> !bytes.Contains(a, b)
bytes.Compare(a, b) == 0 -> bytes.Equal(a, b)
bytes.Compare(a, b) != 0 -> !bytes.Equal(a, b)
time.Now().Sub(a) -> time.Since(a)
a.Sub(time.Now()) -> time.Until(a)
```
## Ignoring checks
gosimple allows disabling some or all checks for certain files. The
`-ignore` flag takes a whitespace-separated list of
`glob:check1,check2,...` pairs. `glob` is a glob pattern matching
files in packages, and `check1,check2,...` are checks named by their
IDs.
For example, to ignore uses of strconv.FormatInt in all test files in the
`os/exec` package, you would write `-ignore
"os/exec/*_test.go:S1015"`
Additionally, the check IDs support globbing, too. Using a pattern
such as `os/exec/*.gen.go:*` would disable all checks in all
auto-generated files in the os/exec package.
Any whitespace can be used to separate rules, including newlines. This
allows for a setup like the following:
```
$ cat stdlib.ignore
sync/*_test.go:S1000
testing/benchmark.go:S1016
runtime/string_test.go:S1005
$ gosimple -ignore "$(cat stdlib.ignore)" std
```

View File

@@ -13,6 +13,9 @@ func main() {
fs.Parse(os.Args[1:]) fs.Parse(os.Args[1:])
c := simple.NewChecker() c := simple.NewChecker()
c.CheckGenerated = *gen c.CheckGenerated = *gen
cfg := lintutil.CheckerConfig{
lintutil.ProcessFlagSet(c, fs) Checker: c,
ExitNonZero: true,
}
lintutil.ProcessFlagSet([]lintutil.CheckerConfig{cfg}, fs)
} }

View File

@@ -4,42 +4,23 @@ package main // import "honnef.co/go/tools/cmd/megacheck"
import ( import (
"os" "os"
"honnef.co/go/tools/lint"
"honnef.co/go/tools/lint/lintutil" "honnef.co/go/tools/lint/lintutil"
"honnef.co/go/tools/simple" "honnef.co/go/tools/simple"
"honnef.co/go/tools/staticcheck" "honnef.co/go/tools/staticcheck"
"honnef.co/go/tools/unused" "honnef.co/go/tools/unused"
) )
type Checker struct {
Checkers []lint.Checker
}
func (c *Checker) Init(prog *lint.Program) {
for _, cc := range c.Checkers {
cc.Init(prog)
}
}
func (c *Checker) Funcs() map[string]lint.Func {
fns := map[string]lint.Func{}
for _, cc := range c.Checkers {
for k, v := range cc.Funcs() {
fns[k] = v
}
}
return fns
}
func main() { func main() {
var flags struct { var flags struct {
staticcheck struct { staticcheck struct {
enabled bool enabled bool
generated bool generated bool
exitNonZero bool
} }
gosimple struct { gosimple struct {
enabled bool enabled bool
generated bool generated bool
exitNonZero bool
} }
unused struct { unused struct {
enabled bool enabled bool
@@ -51,6 +32,7 @@ func main() {
debug string debug string
wholeProgram bool wholeProgram bool
reflection bool reflection bool
exitNonZero bool
} }
} }
fs := lintutil.FlagSet("megacheck") fs := lintutil.FlagSet("megacheck")
@@ -58,11 +40,15 @@ func main() {
"simple.enabled", true, "Run gosimple") "simple.enabled", true, "Run gosimple")
fs.BoolVar(&flags.gosimple.generated, fs.BoolVar(&flags.gosimple.generated,
"simple.generated", false, "Check generated code") "simple.generated", false, "Check generated code")
fs.BoolVar(&flags.gosimple.exitNonZero,
"simple.exit-non-zero", false, "Exit non-zero if any problems were found")
fs.BoolVar(&flags.staticcheck.enabled, fs.BoolVar(&flags.staticcheck.enabled,
"staticcheck.enabled", true, "Run staticcheck") "staticcheck.enabled", true, "Run staticcheck")
fs.BoolVar(&flags.staticcheck.generated, fs.BoolVar(&flags.staticcheck.generated,
"staticcheck.generated", false, "Check generated code (only applies to a subset of checks)") "staticcheck.generated", false, "Check generated code (only applies to a subset of checks)")
fs.BoolVar(&flags.staticcheck.exitNonZero,
"staticcheck.exit-non-zero", true, "Exit non-zero if any problems were found")
fs.BoolVar(&flags.unused.enabled, fs.BoolVar(&flags.unused.enabled,
"unused.enabled", true, "Run unused") "unused.enabled", true, "Run unused")
@@ -78,22 +64,31 @@ func main() {
"unused.vars", true, "Report unused variables") "unused.vars", true, "Report unused variables")
fs.BoolVar(&flags.unused.wholeProgram, fs.BoolVar(&flags.unused.wholeProgram,
"unused.exported", false, "Treat arguments as a program and report unused exported identifiers") "unused.exported", false, "Treat arguments as a program and report unused exported identifiers")
fs.BoolVar(&flags.unused.reflection, "unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") fs.BoolVar(&flags.unused.reflection,
"unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection")
fs.BoolVar(&flags.unused.exitNonZero,
"unused.exit-non-zero", true, "Exit non-zero if any problems were found")
fs.Parse(os.Args[1:]) fs.Parse(os.Args[1:])
c := &Checker{} var checkers []lintutil.CheckerConfig
if flags.staticcheck.enabled { if flags.staticcheck.enabled {
sac := staticcheck.NewChecker() sac := staticcheck.NewChecker()
sac.CheckGenerated = flags.staticcheck.generated sac.CheckGenerated = flags.staticcheck.generated
c.Checkers = append(c.Checkers, sac) checkers = append(checkers, lintutil.CheckerConfig{
Checker: sac,
ExitNonZero: flags.staticcheck.exitNonZero,
})
} }
if flags.gosimple.enabled { if flags.gosimple.enabled {
sc := simple.NewChecker() sc := simple.NewChecker()
sc.CheckGenerated = flags.gosimple.generated sc.CheckGenerated = flags.gosimple.generated
c.Checkers = append(c.Checkers, sc) checkers = append(checkers, lintutil.CheckerConfig{
Checker: sc,
ExitNonZero: flags.gosimple.exitNonZero,
})
} }
if flags.unused.enabled { if flags.unused.enabled {
@@ -116,8 +111,12 @@ func main() {
uc := unused.NewChecker(mode) uc := unused.NewChecker(mode)
uc.WholeProgram = flags.unused.wholeProgram uc.WholeProgram = flags.unused.wholeProgram
uc.ConsiderReflection = flags.unused.reflection uc.ConsiderReflection = flags.unused.reflection
c.Checkers = append(c.Checkers, unused.NewLintChecker(uc)) checkers = append(checkers, lintutil.CheckerConfig{
Checker: unused.NewLintChecker(uc),
ExitNonZero: flags.unused.exitNonZero,
})
} }
lintutil.ProcessFlagSet(c, fs) lintutil.ProcessFlagSet(checkers, fs)
} }

View File

@@ -1,76 +0,0 @@
# staticcheck
_staticcheck_ is `go vet` on steroids, applying a ton of static analysis
checks you might be used to from tools like ReSharper for C#.
## Installation
Staticcheck requires Go 1.6 or later.
go get honnef.co/go/tools/cmd/staticcheck
## Usage
Invoke `staticcheck` with one or more filenames, a directory, or a package named
by its import path. Staticcheck uses the same
[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as
the `go` command and therefore
also supports relative import paths like `./...`. Additionally the `...`
wildcard can be used as suffix on relative and absolute file paths to recurse
into them.
The output of this tool is a list of suggestions in Vim quickfix format,
which is accepted by lots of different editors.
## Purpose
The main purpose of staticcheck is editor integration, or workflow
integration in general. For example, by running staticcheck when
saving a file, one can quickly catch simple bugs without having to run
the whole test suite or the program itself.
The tool shouldn't report any errors unless there are legitimate
bugs - or very dubious constructs - in the code.
It is similar in nature to `go vet`, but has more checks that catch
bugs that would also be caught easily at runtime, to reduce the number
of edit, compile and debug cycles.
## Checks
The following things are currently checked by staticcheck:
[CHECKS PLACEHOLDER]
## Ignoring checks
staticcheck allows disabling some or all checks for certain files. The
`-ignore` flag takes a whitespace-separated list of
`glob:check1,check2,...` pairs. `glob` is a glob pattern matching
files in packages, and `check1,check2,...` are checks named by their
IDs.
For example, to ignore assignment to nil maps in all test files in the
`os/exec` package, you would write `-ignore
"os/exec/*_test.go:SA5000"`
Additionally, the check IDs support globbing, too. Using a pattern
such as `os/exec/*.gen.go:*` would disable all checks in all
auto-generated files in the os/exec package.
Any whitespace can be used to separate rules, including newlines. This
allows for a setup like the following:
```
$ cat stdlib.ignore
sync/*_test.go:SA2001
testing/benchmark.go:SA3001
runtime/string_test.go:SA4007
runtime/proc_test.go:SA5004
runtime/lfstack_test.go:SA4010
runtime/append_test.go:SA4010
errors/errors_test.go:SA4000
reflect/all_test.go:SA4000
$ staticcheck -ignore "$(cat stdlib.ignore)" std
```

View File

@@ -1,48 +0,0 @@
#!/bin/sh
build() {
echo "|Check|Description|"
echo "|---|---|"
for cat in docs/categories/*; do
catname=$(basename "$cat")
catdesc=$(cat "$cat")
echo "|**$catname???**|**$catdesc**|"
for check in docs/checks/"$catname"*; do
checkname=$(basename "$check")
checktitle=$(head -1 "$check")
checkdesc=$(tail -n +3 "$check")
if [ -n "$checkdesc" ]; then
echo "|[$checkname](#$checkname)|$checktitle|"
else
echo "|$checkname|$checktitle|"
fi
done
echo "|||"
done
echo
for check in docs/checks/*; do
checkname=$(basename "$check")
checktitle=$(head -1 "$check")
checkdesc=$(tail -n +3 "$check")
if [ -n "$checkdesc" ]; then
echo "### <a id=\"$checkname\">$checkname $checktitle"
echo
echo "$checkdesc"
fi
done
}
output=$(build)
while IFS= read -r line; do
if [ "$line" = "[CHECKS PLACEHOLDER]" ]; then
echo "$output"
else
echo "$line"
fi
done < README.md.template

View File

@@ -15,5 +15,9 @@ func main() {
fs.Parse(os.Args[1:]) fs.Parse(os.Args[1:])
c := staticcheck.NewChecker() c := staticcheck.NewChecker()
c.CheckGenerated = *gen c.CheckGenerated = *gen
lintutil.ProcessFlagSet(c, fs) cfg := lintutil.CheckerConfig{
Checker: c,
ExitNonZero: true,
}
lintutil.ProcessFlagSet([]lintutil.CheckerConfig{cfg}, fs)
} }

View File

@@ -70,5 +70,9 @@ func main() {
checker := newChecker(mode) checker := newChecker(mode)
l := unused.NewLintChecker(checker) l := unused.NewLintChecker(checker)
lintutil.ProcessFlagSet(l, fs) cfg := lintutil.CheckerConfig{
Checker: l,
ExitNonZero: true,
}
lintutil.ProcessFlagSet([]lintutil.CheckerConfig{cfg}, fs)
} }

View File

@@ -0,0 +1,54 @@
package deprecated
type Deprecation struct {
DeprecatedSince int
AlternativeAvailableSince int
}
var Stdlib = map[string]Deprecation{
"image/jpeg.Reader": {4, 0},
// FIXME(dh): AllowBinary isn't being detected as deprecated
// because the comment has a newline right after "Deprecated:"
"go/build.AllowBinary": {7, 7},
"(archive/zip.FileHeader).CompressedSize": {1, 1},
"(archive/zip.FileHeader).UncompressedSize": {1, 1},
"(go/doc.Package).Bugs": {1, 1},
"os.SEEK_SET": {7, 7},
"os.SEEK_CUR": {7, 7},
"os.SEEK_END": {7, 7},
"(net.Dialer).Cancel": {7, 7},
"runtime.CPUProfile": {9, 0},
"compress/flate.ReadError": {6, 6},
"compress/flate.WriteError": {6, 6},
"path/filepath.HasPrefix": {0, 0},
"(net/http.Transport).Dial": {7, 7},
"(*net/http.Transport).CancelRequest": {6, 5},
"net/http.ErrWriteAfterFlush": {7, 0},
"net/http.ErrHeaderTooLong": {8, 0},
"net/http.ErrShortBody": {8, 0},
"net/http.ErrMissingContentLength": {8, 0},
"net/http/httputil.ErrPersistEOF": {0, 0},
"net/http/httputil.ErrClosed": {0, 0},
"net/http/httputil.ErrPipeline": {0, 0},
"net/http/httputil.ServerConn": {0, 0},
"net/http/httputil.NewServerConn": {0, 0},
"net/http/httputil.ClientConn": {0, 0},
"net/http/httputil.NewClientConn": {0, 0},
"net/http/httputil.NewProxyClientConn": {0, 0},
"(net/http.Request).Cancel": {7, 7},
"(text/template/parse.PipeNode).Line": {1, 1},
"(text/template/parse.ActionNode).Line": {1, 1},
"(text/template/parse.BranchNode).Line": {1, 1},
"(text/template/parse.TemplateNode).Line": {1, 1},
"database/sql/driver.ColumnConverter": {9, 9},
"database/sql/driver.Execer": {8, 8},
"database/sql/driver.Queryer": {8, 8},
"(database/sql/driver.Conn).Begin": {8, 8},
"(database/sql/driver.Stmt).Exec": {8, 8},
"(database/sql/driver.Stmt).Query": {8, 8},
"syscall.StringByteSlice": {1, 1},
"syscall.StringBytePtr": {1, 1},
"syscall.StringSlicePtr": {1, 1},
"syscall.StringToUTF16": {1, 1},
"syscall.StringToUTF16Ptr": {1, 1},
}

View File

@@ -11,6 +11,14 @@ import (
) )
var stdlibDescs = map[string]Description{ var stdlibDescs = map[string]Description{
"errors.New": Description{Pure: true},
"fmt.Errorf": Description{Pure: true},
"fmt.Sprintf": Description{Pure: true},
"fmt.Sprint": Description{Pure: true},
"sort.Reverse": Description{Pure: true},
"strings.Map": Description{Pure: true}, "strings.Map": Description{Pure: true},
"strings.Repeat": Description{Pure: true}, "strings.Repeat": Description{Pure: true},
"strings.Replace": Description{Pure: true}, "strings.Replace": Description{Pure: true},
@@ -40,6 +48,8 @@ var stdlibDescs = map[string]Description{
type Description struct { type Description struct {
// The function is known to be pure // The function is known to be pure
Pure bool Pure bool
// The function is known to be a stub
Stub bool
// The function is known to never return (panics notwithstanding) // The function is known to never return (panics notwithstanding)
Infinite bool Infinite bool
// Variable ranges // Variable ranges
@@ -82,6 +92,7 @@ func (d *Descriptions) Get(fn *ssa.Function) Description {
{ {
fd.result = stdlibDescs[fn.RelString(nil)] fd.result = stdlibDescs[fn.RelString(nil)]
fd.result.Pure = fd.result.Pure || d.IsPure(fn) fd.result.Pure = fd.result.Pure || d.IsPure(fn)
fd.result.Stub = fd.result.Stub || d.IsStub(fn)
fd.result.Infinite = fd.result.Infinite || !terminates(fn) fd.result.Infinite = fd.result.Infinite || !terminates(fn)
fd.result.Ranges = vrp.BuildGraph(fn).Solve() fd.result.Ranges = vrp.BuildGraph(fn).Solve()
fd.result.Loops = findLoops(fn) fd.result.Loops = findLoops(fn)

View File

@@ -5,9 +5,41 @@ import (
"go/types" "go/types"
"honnef.co/go/tools/callgraph" "honnef.co/go/tools/callgraph"
"honnef.co/go/tools/lint"
"honnef.co/go/tools/ssa" "honnef.co/go/tools/ssa"
) )
// IsStub reports whether a function is a stub. A function is
// considered a stub if it has no instructions or exactly one
// instruction, which must be either returning only constant values or
// a panic.
func (d *Descriptions) IsStub(fn *ssa.Function) bool {
if len(fn.Blocks) == 0 {
return true
}
if len(fn.Blocks) > 1 {
return false
}
instrs := lint.FilterDebug(fn.Blocks[0].Instrs)
if len(instrs) != 1 {
return false
}
switch instrs[0].(type) {
case *ssa.Return:
// Since this is the only instruction, the return value must
// be a constant. We consider all constants as stubs, not just
// the zero value. This does not, unfortunately, cover zero
// initialised structs, as these cause additional
// instructions.
return true
case *ssa.Panic:
return true
default:
return false
}
}
func (d *Descriptions) IsPure(fn *ssa.Function) bool { func (d *Descriptions) IsPure(fn *ssa.Function) bool {
if fn.Signature.Results().Len() == 0 { if fn.Signature.Results().Len() == 0 {
// A function with no return values is empty or is doing some // A function with no return values is empty or is doing some

View File

@@ -1,138 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gcsizes provides a types.Sizes implementation that adheres
// to the rules used by the gc compiler.
package gcsizes // import "honnef.co/go/tools/gcsizes"
import (
"go/build"
"go/types"
)
type Sizes struct {
WordSize int64
MaxAlign int64
}
// ForArch returns a correct Sizes for the given architecture.
func ForArch(arch string) *Sizes {
wordSize := int64(8)
maxAlign := int64(8)
switch build.Default.GOARCH {
case "386", "arm":
wordSize, maxAlign = 4, 4
case "amd64p32":
wordSize = 4
}
return &Sizes{WordSize: wordSize, MaxAlign: maxAlign}
}
func (s *Sizes) Alignof(T types.Type) int64 {
switch t := T.Underlying().(type) {
case *types.Array:
return s.Alignof(t.Elem())
case *types.Struct:
max := int64(1)
n := t.NumFields()
var fields []*types.Var
for i := 0; i < n; i++ {
fields = append(fields, t.Field(i))
}
for _, f := range fields {
if a := s.Alignof(f.Type()); a > max {
max = a
}
}
return max
}
a := s.Sizeof(T) // may be 0
if a < 1 {
return 1
}
if a > s.MaxAlign {
return s.MaxAlign
}
return a
}
func (s *Sizes) Offsetsof(fields []*types.Var) []int64 {
offsets := make([]int64, len(fields))
var o int64
for i, f := range fields {
a := s.Alignof(f.Type())
o = align(o, a)
offsets[i] = o
o += s.Sizeof(f.Type())
}
return offsets
}
var basicSizes = [...]byte{
types.Bool: 1,
types.Int8: 1,
types.Int16: 2,
types.Int32: 4,
types.Int64: 8,
types.Uint8: 1,
types.Uint16: 2,
types.Uint32: 4,
types.Uint64: 8,
types.Float32: 4,
types.Float64: 8,
types.Complex64: 8,
types.Complex128: 16,
}
func (s *Sizes) Sizeof(T types.Type) int64 {
switch t := T.Underlying().(type) {
case *types.Basic:
k := t.Kind()
if int(k) < len(basicSizes) {
if s := basicSizes[k]; s > 0 {
return int64(s)
}
}
if k == types.String {
return s.WordSize * 2
}
case *types.Array:
n := t.Len()
if n == 0 {
return 0
}
a := s.Alignof(t.Elem())
z := s.Sizeof(t.Elem())
return align(z, a)*(n-1) + z
case *types.Slice:
return s.WordSize * 3
case *types.Struct:
n := t.NumFields()
if n == 0 {
return 0
}
var fields []*types.Var
for i := 0; i < n; i++ {
fields = append(fields, t.Field(i))
}
offsets := s.Offsetsof(fields)
a := s.Alignof(T)
lsz := s.Sizeof(fields[n-1].Type())
if lsz == 0 {
lsz = 1
}
z := offsets[n-1] + lsz
return align(z, a)
case *types.Interface:
return s.WordSize * 2
}
return s.WordSize // catch-all
}
// align returns the smallest y >= x such that y % a == 0.
func align(x, a int64) int64 {
y := x + a - 1
return y - y%a
}

View File

@@ -11,6 +11,7 @@ import (
"bytes" "bytes"
"fmt" "fmt"
"go/ast" "go/ast"
"go/build"
"go/constant" "go/constant"
"go/printer" "go/printer"
"go/token" "go/token"
@@ -20,6 +21,7 @@ import (
"sort" "sort"
"strings" "strings"
"sync" "sync"
"unicode"
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/loader" "golang.org/x/tools/go/loader"
@@ -30,15 +32,85 @@ import (
type Job struct { type Job struct {
Program *Program Program *Program
checker string
check string check string
problems []Problem problems []Problem
} }
type Ignore struct { type Ignore interface {
Match(p Problem) bool
}
type LineIgnore struct {
File string
Line int
Checks []string
matched bool
pos token.Pos
}
func (li *LineIgnore) Match(p Problem) bool {
if p.Position.Filename != li.File || p.Position.Line != li.Line {
return false
}
for _, c := range li.Checks {
if m, _ := filepath.Match(c, p.Check); m {
li.matched = true
return true
}
}
return false
}
func (li *LineIgnore) String() string {
matched := "not matched"
if li.matched {
matched = "matched"
}
return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched)
}
type FileIgnore struct {
File string
Checks []string
}
func (fi *FileIgnore) Match(p Problem) bool {
if p.Position.Filename != fi.File {
return false
}
for _, c := range fi.Checks {
if m, _ := filepath.Match(c, p.Check); m {
return true
}
}
return false
}
type GlobIgnore struct {
Pattern string Pattern string
Checks []string Checks []string
} }
func (gi *GlobIgnore) Match(p Problem) bool {
if gi.Pattern != "*" {
pkgpath := p.Package.Path()
if strings.HasSuffix(pkgpath, "_test") {
pkgpath = pkgpath[:len(pkgpath)-len("_test")]
}
name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename))
if m, _ := filepath.Match(gi.Pattern, name); !m {
return false
}
}
for _, c := range gi.Checks {
if m, _ := filepath.Match(c, p.Check); m {
return true
}
}
return false
}
type Program struct { type Program struct {
SSA *ssa.Program SSA *ssa.Program
Prog *loader.Program Prog *loader.Program
@@ -58,15 +130,25 @@ type Func func(*Job)
// Problem represents a problem in some source code. // Problem represents a problem in some source code.
type Problem struct { type Problem struct {
Position token.Pos // position in source file pos token.Pos
Position token.Position // position in source file
Text string // the prose that describes the problem Text string // the prose that describes the problem
Check string
Checker string
Package *types.Package
Ignored bool
} }
func (p *Problem) String() string { func (p *Problem) String() string {
if p.Check == "" {
return p.Text return p.Text
} }
return fmt.Sprintf("%s (%s)", p.Text, p.Check)
}
type Checker interface { type Checker interface {
Name() string
Prefix() string
Init(*Program) Init(*Program)
Funcs() map[string]Func Funcs() map[string]Func
} }
@@ -76,33 +158,42 @@ type Linter struct {
Checker Checker Checker Checker
Ignores []Ignore Ignores []Ignore
GoVersion int GoVersion int
ReturnIgnored bool
automaticIgnores []Ignore
} }
func (l *Linter) ignore(j *Job, p Problem) bool { func (l *Linter) ignore(p Problem) bool {
tf := j.Program.SSA.Fset.File(p.Position) ignored := false
f := j.Program.tokenFileMap[tf] for _, ig := range l.automaticIgnores {
pkg := j.Program.astFileMap[f].Pkg // We cannot short-circuit these, as we want to record, for
// each ignore, whether it matched or not.
if ig.Match(p) {
ignored = true
}
}
if ignored {
// no need to execute other ignores if we've already had a
// match.
return true
}
for _, ig := range l.Ignores { for _, ig := range l.Ignores {
pkgpath := pkg.Path() // We can short-circuit here, as we aren't tracking any
if strings.HasSuffix(pkgpath, "_test") { // information.
pkgpath = pkgpath[:len(pkgpath)-len("_test")] if ig.Match(p) {
}
name := filepath.Join(pkgpath, filepath.Base(tf.Name()))
if m, _ := filepath.Match(ig.Pattern, name); !m {
continue
}
for _, c := range ig.Checks {
if m, _ := filepath.Match(c, j.check); m {
return true return true
} }
} }
}
return false return false
} }
func (prog *Program) File(node Positioner) *ast.File {
return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())]
}
func (j *Job) File(node Positioner) *ast.File { func (j *Job) File(node Positioner) *ast.File {
return j.Program.tokenFileMap[j.Program.SSA.Fset.File(node.Pos())] return j.Program.File(node)
} }
// TODO(dh): switch to sort.Slice when Go 1.9 lands. // TODO(dh): switch to sort.Slice when Go 1.9 lands.
@@ -116,7 +207,7 @@ func (ps byPosition) Len() int {
} }
func (ps byPosition) Less(i int, j int) bool { func (ps byPosition) Less(i int, j int) bool {
pi, pj := ps.fset.Position(ps.ps[i].Position), ps.fset.Position(ps.ps[j].Position) pi, pj := ps.ps[i].Position, ps.ps[j].Position
if pi.Filename != pj.Filename { if pi.Filename != pj.Filename {
return pi.Filename < pj.Filename return pi.Filename < pj.Filename
@@ -135,16 +226,40 @@ func (ps byPosition) Swap(i int, j int) {
ps.ps[i], ps.ps[j] = ps.ps[j], ps.ps[i] ps.ps[i], ps.ps[j] = ps.ps[j], ps.ps[i]
} }
func (l *Linter) Lint(lprog *loader.Program) []Problem { func parseDirective(s string) (cmd string, args []string) {
if !strings.HasPrefix(s, "//lint:") {
return "", nil
}
s = strings.TrimPrefix(s, "//lint:")
fields := strings.Split(s, " ")
return fields[0], fields[1:]
}
func (l *Linter) Lint(lprog *loader.Program, conf *loader.Config) []Problem {
ssaprog := ssautil.CreateProgram(lprog, ssa.GlobalDebug) ssaprog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)
ssaprog.Build() ssaprog.Build()
pkgMap := map[*ssa.Package]*Pkg{} pkgMap := map[*ssa.Package]*Pkg{}
var pkgs []*Pkg var pkgs []*Pkg
for _, pkginfo := range lprog.InitialPackages() { for _, pkginfo := range lprog.InitialPackages() {
ssapkg := ssaprog.Package(pkginfo.Pkg) ssapkg := ssaprog.Package(pkginfo.Pkg)
var bp *build.Package
if len(pkginfo.Files) != 0 {
path := lprog.Fset.Position(pkginfo.Files[0].Pos()).Filename
dir := filepath.Dir(path)
var err error
ctx := conf.Build
if ctx == nil {
ctx = &build.Default
}
bp, err = ctx.ImportDir(dir, 0)
if err != nil {
// shouldn't happen
}
}
pkg := &Pkg{ pkg := &Pkg{
Package: ssapkg, Package: ssapkg,
Info: pkginfo, Info: pkginfo,
BuildPkg: bp,
} }
pkgMap[ssapkg] = pkg pkgMap[ssapkg] = pkg
pkgs = append(pkgs, pkg) pkgs = append(pkgs, pkg)
@@ -158,6 +273,7 @@ func (l *Linter) Lint(lprog *loader.Program) []Problem {
tokenFileMap: map[*token.File]*ast.File{}, tokenFileMap: map[*token.File]*ast.File{},
astFileMap: map[*ast.File]*Pkg{}, astFileMap: map[*ast.File]*Pkg{},
} }
initial := map[*types.Package]struct{}{} initial := map[*types.Package]struct{}{}
for _, pkg := range pkgs { for _, pkg := range pkgs {
initial[pkg.Info.Pkg] = struct{}{} initial[pkg.Info.Pkg] = struct{}{}
@@ -176,9 +292,69 @@ func (l *Linter) Lint(lprog *loader.Program) []Problem {
ssapkg := ssaprog.Package(pkg.Info.Pkg) ssapkg := ssaprog.Package(pkg.Info.Pkg)
for _, f := range pkg.Info.Files { for _, f := range pkg.Info.Files {
prog.astFileMap[f] = pkgMap[ssapkg]
}
}
for _, pkginfo := range lprog.AllPackages {
for _, f := range pkginfo.Files {
tf := lprog.Fset.File(f.Pos()) tf := lprog.Fset.File(f.Pos())
prog.tokenFileMap[tf] = f prog.tokenFileMap[tf] = f
prog.astFileMap[f] = pkgMap[ssapkg] }
}
var out []Problem
l.automaticIgnores = nil
for _, pkginfo := range lprog.InitialPackages() {
for _, f := range pkginfo.Files {
cm := ast.NewCommentMap(lprog.Fset, f, f.Comments)
for node, cgs := range cm {
for _, cg := range cgs {
for _, c := range cg.List {
if !strings.HasPrefix(c.Text, "//lint:") {
continue
}
cmd, args := parseDirective(c.Text)
switch cmd {
case "ignore", "file-ignore":
if len(args) < 2 {
// FIXME(dh): this causes duplicated warnings when using megacheck
p := Problem{
pos: c.Pos(),
Position: prog.DisplayPosition(c.Pos()),
Text: "malformed linter directive; missing the required reason field?",
Check: "",
Checker: l.Checker.Name(),
Package: nil,
}
out = append(out, p)
continue
}
default:
// unknown directive, ignore
continue
}
checks := strings.Split(args[0], ",")
pos := prog.DisplayPosition(node.Pos())
var ig Ignore
switch cmd {
case "ignore":
ig = &LineIgnore{
File: pos.Filename,
Line: pos.Line,
Checks: checks,
pos: c.Pos(),
}
case "file-ignore":
ig = &FileIgnore{
File: pos.Filename,
Checks: checks,
}
}
l.automaticIgnores = append(l.automaticIgnores, ig)
}
}
}
} }
} }
@@ -237,6 +413,7 @@ func (l *Linter) Lint(lprog *loader.Program) []Problem {
for _, k := range keys { for _, k := range keys {
j := &Job{ j := &Job{
Program: prog, Program: prog,
checker: l.Checker.Name(),
check: k, check: k,
} }
jobs = append(jobs, j) jobs = append(jobs, j)
@@ -255,15 +432,47 @@ func (l *Linter) Lint(lprog *loader.Program) []Problem {
} }
wg.Wait() wg.Wait()
var out []Problem
for _, j := range jobs { for _, j := range jobs {
for _, p := range j.problems { for _, p := range j.problems {
if !l.ignore(j, p) { p.Ignored = l.ignore(p)
if l.ReturnIgnored || !p.Ignored {
out = append(out, p) out = append(out, p)
} }
} }
} }
for _, ig := range l.automaticIgnores {
ig, ok := ig.(*LineIgnore)
if !ok {
continue
}
if ig.matched {
continue
}
for _, c := range ig.Checks {
idx := strings.IndexFunc(c, func(r rune) bool {
return unicode.IsNumber(r)
})
if idx == -1 {
// malformed check name, backing out
continue
}
if c[:idx] != l.Checker.Prefix() {
// not for this checker
continue
}
p := Problem{
pos: ig.pos,
Position: prog.DisplayPosition(ig.pos),
Text: "this linter directive didn't match anything; should it be removed?",
Check: "",
Checker: l.Checker.Name(),
Package: nil,
}
out = append(out, p)
}
}
sort.Sort(byPosition{lprog.Fset, out}) sort.Sort(byPosition{lprog.Fset, out})
return out return out
} }
@@ -272,6 +481,7 @@ func (l *Linter) Lint(lprog *loader.Program) []Problem {
type Pkg struct { type Pkg struct {
*ssa.Package *ssa.Package
Info *loader.PackageInfo Info *loader.PackageInfo
BuildPkg *build.Package
} }
type packager interface { type packager interface {
@@ -309,10 +519,55 @@ type Positioner interface {
Pos() token.Pos Pos() token.Pos
} }
func (prog *Program) DisplayPosition(p token.Pos) token.Position {
// The //line compiler directive can be used to change the file
// name and line numbers associated with code. This can, for
// example, be used by code generation tools. The most prominent
// example is 'go tool cgo', which uses //line directives to refer
// back to the original source code.
//
// In the context of our linters, we need to treat these
// directives differently depending on context. For cgo files, we
// want to honour the directives, so that line numbers are
// adjusted correctly. For all other files, we want to ignore the
// directives, so that problems are reported at their actual
// position and not, for example, a yacc grammar file. This also
// affects the ignore mechanism, since it operates on the position
// information stored within problems. With this implementation, a
// user will ignore foo.go, not foo.y
pkg := prog.astFileMap[prog.tokenFileMap[prog.Prog.Fset.File(p)]]
bp := pkg.BuildPkg
adjPos := prog.Prog.Fset.Position(p)
if bp == nil {
// couldn't find the package for some reason (deleted? faulty
// file system?)
return adjPos
}
base := filepath.Base(adjPos.Filename)
for _, f := range bp.CgoFiles {
if f == base {
// this is a cgo file, use the adjusted position
return adjPos
}
}
// not a cgo file, ignore //line directives
return prog.Prog.Fset.PositionFor(p, false)
}
func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem {
tf := j.Program.SSA.Fset.File(n.Pos())
f := j.Program.tokenFileMap[tf]
pkg := j.Program.astFileMap[f].Pkg
pos := j.Program.DisplayPosition(n.Pos())
problem := Problem{ problem := Problem{
Position: n.Pos(), pos: n.Pos(),
Text: fmt.Sprintf(format, args...) + fmt.Sprintf(" (%s)", j.check), Position: pos,
Text: fmt.Sprintf(format, args...),
Check: j.check,
Checker: j.checker,
Package: pkg,
} }
j.problems = append(j.problems, problem) j.problems = append(j.problems, problem)
return &j.problems[len(j.problems)-1] return &j.problems[len(j.problems)-1]
@@ -422,6 +677,31 @@ func IsGenerated(f *ast.File) bool {
return false return false
} }
func Preamble(f *ast.File) string {
cutoff := f.Package
if f.Doc != nil {
cutoff = f.Doc.Pos()
}
var out []string
for _, cmt := range f.Comments {
if cmt.Pos() >= cutoff {
break
}
out = append(out, cmt.Text())
}
return strings.Join(out, "\n")
}
func IsPointerLike(T types.Type) bool {
switch T := T.Underlying().(type) {
case *types.Interface, *types.Chan, *types.Map, *types.Pointer:
return true
case *types.Basic:
return T.Kind() == types.UnsafePointer
}
return false
}
func (j *Job) IsGoVersion(minor int) bool { func (j *Job) IsGoVersion(minor int) bool {
return j.Program.GoVersion >= minor return j.Program.GoVersion >= minor
} }
@@ -448,6 +728,22 @@ func (j *Job) IsCallToAnyAST(node ast.Node, names ...string) bool {
return false return false
} }
func (j *Job) SelectorName(expr *ast.SelectorExpr) string {
sel := j.Program.Info.Selections[expr]
if sel == nil {
if x, ok := expr.X.(*ast.Ident); ok {
pkg, ok := j.Program.Info.ObjectOf(x).(*types.PkgName)
if !ok {
// This shouldn't happen
return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
}
return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
}
panic(fmt.Sprintf("unsupported selector: %v", expr))
}
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
}
func CallName(call *ssa.CallCommon) string { func CallName(call *ssa.CallCommon) string {
if call.IsInvoke() { if call.IsInvoke() {
return "" return ""

View File

@@ -8,23 +8,70 @@
package lintutil // import "honnef.co/go/tools/lint/lintutil" package lintutil // import "honnef.co/go/tools/lint/lintutil"
import ( import (
"encoding/json"
"errors" "errors"
"flag" "flag"
"fmt" "fmt"
"go/build" "go/build"
"go/parser" "go/parser"
"go/token" "go/token"
"go/types"
"io"
"os" "os"
"path/filepath" "path/filepath"
"strconv" "strconv"
"strings" "strings"
"honnef.co/go/tools/lint" "honnef.co/go/tools/lint"
"honnef.co/go/tools/version"
"github.com/kisielk/gotool" "github.com/kisielk/gotool"
"golang.org/x/tools/go/loader" "golang.org/x/tools/go/loader"
) )
type OutputFormatter interface {
Format(p lint.Problem)
}
type TextOutput struct {
w io.Writer
}
func (o TextOutput) Format(p lint.Problem) {
fmt.Fprintf(o.w, "%v: %s\n", relativePositionString(p.Position), p.String())
}
type JSONOutput struct {
w io.Writer
}
func (o JSONOutput) Format(p lint.Problem) {
type location struct {
File string `json:"file"`
Line int `json:"line"`
Column int `json:"column"`
}
jp := struct {
Checker string `json:"checker"`
Code string `json:"code"`
Severity string `json:"severity,omitempty"`
Location location `json:"location"`
Message string `json:"message"`
Ignored bool `json:"ignored"`
}{
p.Checker,
p.Check,
"", // TODO(dh): support severity
location{
p.Position.Filename,
p.Position.Line,
p.Position.Column,
},
p.Text,
p.Ignored,
}
_ = json.NewEncoder(o.w).Encode(jp)
}
func usage(name string, flags *flag.FlagSet) func() { func usage(name string, flags *flag.FlagSet) func() {
return func() { return func() {
fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) fmt.Fprintf(os.Stderr, "Usage of %s:\n", name)
@@ -42,9 +89,10 @@ type runner struct {
tags []string tags []string
ignores []lint.Ignore ignores []lint.Ignore
version int version int
returnIgnored bool
} }
func (runner runner) resolveRelative(importPaths []string) (goFiles bool, err error) { func resolveRelative(importPaths []string, tags []string) (goFiles bool, err error) {
if len(importPaths) == 0 { if len(importPaths) == 0 {
return false, nil return false, nil
} }
@@ -57,7 +105,7 @@ func (runner runner) resolveRelative(importPaths []string) (goFiles bool, err er
return false, err return false, err
} }
ctx := build.Default ctx := build.Default
ctx.BuildTags = runner.tags ctx.BuildTags = tags
for i, path := range importPaths { for i, path := range importPaths {
bpkg, err := ctx.Import(path, wd, build.FindOnly) bpkg, err := ctx.Import(path, wd, build.FindOnly)
if err != nil { if err != nil {
@@ -80,7 +128,7 @@ func parseIgnore(s string) ([]lint.Ignore, error) {
} }
path := p[0] path := p[0]
checks := strings.Split(p[1], ",") checks := strings.Split(p[1], ",")
out = append(out, lint.Ignore{Pattern: path, Checks: checks}) out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks})
} }
return out, nil return out, nil
} }
@@ -117,6 +165,9 @@ func FlagSet(name string) *flag.FlagSet {
flags.String("tags", "", "List of `build tags`") flags.String("tags", "", "List of `build tags`")
flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 'os/exec/*_test.go'") flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 'os/exec/*_test.go'")
flags.Bool("tests", true, "Include tests") flags.Bool("tests", true, "Include tests")
flags.Bool("version", false, "Print version and exit")
flags.Bool("show-ignored", false, "Don't filter ignored problems")
flags.String("f", "text", "Output `format` (valid choices are 'text' and 'json')")
tags := build.Default.ReleaseTags tags := build.Default.ReleaseTags
v := tags[len(tags)-1][2:] v := tags[len(tags)-1][2:]
@@ -129,67 +180,105 @@ func FlagSet(name string) *flag.FlagSet {
return flags return flags
} }
func ProcessFlagSet(c lint.Checker, fs *flag.FlagSet) { type CheckerConfig struct {
Checker lint.Checker
ExitNonZero bool
}
func ProcessFlagSet(confs []CheckerConfig, fs *flag.FlagSet) {
tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string)
ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string)
tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool)
version := fs.Lookup("go").Value.(flag.Getter).Get().(int) goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int)
format := fs.Lookup("f").Value.(flag.Getter).Get().(string)
printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool)
showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool)
ps, lprog, err := Lint(c, fs.Args(), &Options{ if printVersion {
version.Print()
os.Exit(0)
}
var cs []lint.Checker
for _, conf := range confs {
cs = append(cs, conf.Checker)
}
pss, err := Lint(cs, fs.Args(), &Options{
Tags: strings.Fields(tags), Tags: strings.Fields(tags),
LintTests: tests, LintTests: tests,
Ignores: ignore, Ignores: ignore,
GoVersion: version, GoVersion: goVersion,
ReturnIgnored: showIgnored,
}) })
if err != nil { if err != nil {
fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)
} }
unclean := false
for _, p := range ps { var ps []lint.Problem
unclean = true for _, p := range pss {
pos := lprog.Fset.Position(p.Position) ps = append(ps, p...)
fmt.Printf("%v: %s\n", relativePositionString(pos), p.Text)
} }
if unclean {
var f OutputFormatter
switch format {
case "text":
f = TextOutput{os.Stdout}
case "json":
f = JSONOutput{os.Stdout}
default:
fmt.Fprintf(os.Stderr, "unsupported output format %q\n", format)
os.Exit(2)
}
for _, p := range ps {
f.Format(p)
}
for i, p := range pss {
if len(p) != 0 && confs[i].ExitNonZero {
os.Exit(1) os.Exit(1)
} }
} }
}
type Options struct { type Options struct {
Tags []string Tags []string
LintTests bool LintTests bool
Ignores string Ignores string
GoVersion int GoVersion int
ReturnIgnored bool
} }
func Lint(c lint.Checker, pkgs []string, opt *Options) ([]lint.Problem, *loader.Program, error) { func Lint(cs []lint.Checker, pkgs []string, opt *Options) ([][]lint.Problem, error) {
// TODO(dh): Instead of returning the loader.Program, we should
// store token.Position instead of token.Pos in lint.Problem.
if opt == nil { if opt == nil {
opt = &Options{} opt = &Options{}
} }
ignores, err := parseIgnore(opt.Ignores) ignores, err := parseIgnore(opt.Ignores)
if err != nil { if err != nil {
return nil, nil, err return nil, err
}
runner := &runner{
checker: c,
tags: opt.Tags,
ignores: ignores,
version: opt.GoVersion,
} }
paths := gotool.ImportPaths(pkgs) paths := gotool.ImportPaths(pkgs)
goFiles, err := runner.resolveRelative(paths) goFiles, err := resolveRelative(paths, opt.Tags)
if err != nil { if err != nil {
return nil, nil, err return nil, err
} }
ctx := build.Default ctx := build.Default
ctx.BuildTags = runner.tags ctx.BuildTags = opt.Tags
hadError := false
conf := &loader.Config{ conf := &loader.Config{
Build: &ctx, Build: &ctx,
ParserMode: parser.ParseComments, ParserMode: parser.ParseComments,
ImportPkgs: map[string]bool{}, ImportPkgs: map[string]bool{},
TypeChecker: types.Config{
Error: func(err error) {
// Only print the first error found
if hadError {
return
}
hadError = true
fmt.Fprintln(os.Stderr, err)
},
},
} }
if goFiles { if goFiles {
conf.CreateFromFilenames("adhoc", paths...) conf.CreateFromFilenames("adhoc", paths...)
@@ -200,9 +289,21 @@ func Lint(c lint.Checker, pkgs []string, opt *Options) ([]lint.Problem, *loader.
} }
lprog, err := conf.Load() lprog, err := conf.Load()
if err != nil { if err != nil {
return nil, nil, err return nil, err
} }
return runner.lint(lprog), lprog, nil
var problems [][]lint.Problem
for _, c := range cs {
runner := &runner{
checker: c,
tags: opt.Tags,
ignores: ignores,
version: opt.GoVersion,
returnIgnored: opt.ReturnIgnored,
}
problems = append(problems, runner.lint(lprog, conf))
}
return problems, nil
} }
func shortPath(path string) string { func shortPath(path string) string {
@@ -230,18 +331,19 @@ func relativePositionString(pos token.Position) string {
return s return s
} }
func ProcessArgs(name string, c lint.Checker, args []string) { func ProcessArgs(name string, cs []CheckerConfig, args []string) {
flags := FlagSet(name) flags := FlagSet(name)
flags.Parse(args) flags.Parse(args)
ProcessFlagSet(c, flags) ProcessFlagSet(cs, flags)
} }
func (runner *runner) lint(lprog *loader.Program) []lint.Problem { func (runner *runner) lint(lprog *loader.Program, conf *loader.Config) []lint.Problem {
l := &lint.Linter{ l := &lint.Linter{
Checker: runner.checker, Checker: runner.checker,
Ignores: runner.ignores, Ignores: runner.ignores,
GoVersion: runner.version, GoVersion: runner.version,
ReturnIgnored: runner.returnIgnored,
} }
return l.Lint(lprog) return l.Lint(lprog, conf)
} }

View File

@@ -6,9 +6,7 @@ import (
"go/constant" "go/constant"
"go/token" "go/token"
"go/types" "go/types"
"math"
"reflect" "reflect"
"strconv"
"strings" "strings"
"honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/internal/sharedcheck"
@@ -31,6 +29,9 @@ func NewChecker() *Checker {
} }
} }
func (*Checker) Name() string { return "gosimple" }
func (*Checker) Prefix() string { return "S" }
func (c *Checker) Init(prog *lint.Program) { func (c *Checker) Init(prog *lint.Program) {
c.nodeFns = lint.NodeFns(prog.Packages) c.nodeFns = lint.NodeFns(prog.Packages)
} }
@@ -42,7 +43,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"S1002": c.LintIfBoolCmp, "S1002": c.LintIfBoolCmp,
"S1003": c.LintStringsContains, "S1003": c.LintStringsContains,
"S1004": c.LintBytesCompare, "S1004": c.LintBytesCompare,
"S1005": c.LintRanges, "S1005": c.LintUnnecessaryBlank,
"S1006": c.LintForTrue, "S1006": c.LintForTrue,
"S1007": c.LintRegexpRaw, "S1007": c.LintRegexpRaw,
"S1008": c.LintIfReturn, "S1008": c.LintIfReturn,
@@ -51,22 +52,25 @@ func (c *Checker) Funcs() map[string]lint.Func {
"S1011": c.LintLoopAppend, "S1011": c.LintLoopAppend,
"S1012": c.LintTimeSince, "S1012": c.LintTimeSince,
"S1013": c.LintSimplerReturn, "S1013": c.LintSimplerReturn,
"S1014": c.LintReceiveIntoBlank, "S1014": nil,
"S1015": c.LintFormatInt, "S1015": nil,
"S1016": c.LintSimplerStructConversion, "S1016": c.LintSimplerStructConversion,
"S1017": c.LintTrim, "S1017": c.LintTrim,
"S1018": c.LintLoopSlide, "S1018": c.LintLoopSlide,
"S1019": c.LintMakeLenCap, "S1019": c.LintMakeLenCap,
"S1020": c.LintAssertNotNil, "S1020": c.LintAssertNotNil,
"S1021": c.LintDeclareAssign, "S1021": c.LintDeclareAssign,
"S1022": c.LintBlankOK, "S1022": nil,
"S1023": c.LintRedundantBreak, "S1023": c.LintRedundantBreak,
"S1024": c.LintTimeUntil, "S1024": c.LintTimeUntil,
"S1025": c.LintRedundantSprintf, "S1025": c.LintRedundantSprintf,
"S1026": c.LintStringCopy, "S1026": nil,
"S1027": c.LintRedundantReturn, "S1027": nil,
"S1028": c.LintErrorsNewSprintf, "S1028": c.LintErrorsNewSprintf,
"S1029": c.LintRangeStringRunes, "S1029": c.LintRangeStringRunes,
"S1030": c.LintBytesBufferConversions,
"S1031": c.LintNilCheckAroundRange,
"S1032": c.LintSortHelpers,
} }
} }
@@ -247,6 +251,36 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) {
} }
} }
func (c *Checker) LintBytesBufferConversions(j *lint.Job) {
fn := func(node ast.Node) bool {
call, ok := node.(*ast.CallExpr)
if !ok || len(call.Args) != 1 {
return true
}
argCall, ok := call.Args[0].(*ast.CallExpr)
if !ok {
return true
}
sel, ok := argCall.Fun.(*ast.SelectorExpr)
if !ok {
return true
}
typ := j.Program.Info.TypeOf(call.Fun)
if typ == types.Universe.Lookup("string").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).Bytes") {
j.Errorf(call, "should use %v.String() instead of %v", j.Render(sel.X), j.Render(call))
} else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).String") {
j.Errorf(call, "should use %v.Bytes() instead of %v", j.Render(sel.X), j.Render(call))
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintStringsContains(j *lint.Job) { func (c *Checker) LintStringsContains(j *lint.Job) {
// map of value to token to bool value // map of value to token to bool value
allowed := map[int64]map[token.Token]bool{ allowed := map[int64]map[token.Token]bool{
@@ -352,23 +386,6 @@ func (c *Checker) LintBytesCompare(j *lint.Job) {
} }
} }
func (c *Checker) LintRanges(j *lint.Job) {
fn := func(node ast.Node) bool {
rs, ok := node.(*ast.RangeStmt)
if !ok {
return true
}
if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) {
j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`")
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintForTrue(j *lint.Job) { func (c *Checker) LintForTrue(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
loop, ok := node.(*ast.ForStmt) loop, ok := node.(*ast.ForStmt)
@@ -941,14 +958,44 @@ func (c *Checker) LintSimplerReturn(j *lint.Job) {
} }
} }
func (c *Checker) LintReceiveIntoBlank(j *lint.Job) { func (c *Checker) LintUnnecessaryBlank(j *lint.Job) {
fn := func(node ast.Node) bool { fn1 := func(node ast.Node) {
assign, ok := node.(*ast.AssignStmt)
if !ok {
return
}
if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
return
}
if !lint.IsBlank(assign.Lhs[1]) {
return
}
switch rhs := assign.Rhs[0].(type) {
case *ast.IndexExpr:
// The type-checker should make sure that it's a map, but
// let's be safe.
if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok {
return
}
case *ast.UnaryExpr:
if rhs.Op != token.ARROW {
return
}
default:
return
}
cp := *assign
cp.Lhs = cp.Lhs[0:1]
j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign))
}
fn2 := func(node ast.Node) {
stmt, ok := node.(*ast.AssignStmt) stmt, ok := node.(*ast.AssignStmt)
if !ok { if !ok {
return true return
} }
if len(stmt.Lhs) != len(stmt.Rhs) { if len(stmt.Lhs) != len(stmt.Rhs) {
return true return
} }
for i, lh := range stmt.Lhs { for i, lh := range stmt.Lhs {
rh := stmt.Rhs[i] rh := stmt.Rhs[i]
@@ -964,100 +1011,23 @@ func (c *Checker) LintReceiveIntoBlank(j *lint.Job) {
} }
j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'") j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'")
} }
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
} }
func (c *Checker) LintFormatInt(j *lint.Job) { fn3 := func(node ast.Node) {
checkBasic := func(v ast.Expr) bool { rs, ok := node.(*ast.RangeStmt)
typ, ok := j.Program.Info.TypeOf(v).(*types.Basic)
if !ok { if !ok {
return false return
} }
return typ.Kind() == types.Int if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) {
j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`")
} }
checkConst := func(v *ast.Ident) bool {
c, ok := j.Program.Info.ObjectOf(v).(*types.Const)
if !ok {
return false
}
if c.Val().Kind() != constant.Int {
return false
}
i, _ := constant.Int64Val(c.Val())
return i <= math.MaxInt32
}
checkConstStrict := func(v *ast.Ident) bool {
if !checkConst(v) {
return false
}
basic, ok := j.Program.Info.ObjectOf(v).(*types.Const).Type().(*types.Basic)
return ok && basic.Kind() == types.UntypedInt
} }
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
call, ok := node.(*ast.CallExpr) fn1(node)
if !ok { fn2(node)
return true if j.IsGoVersion(4) {
} fn3(node)
if !j.IsCallToAST(call, "strconv.FormatInt") {
return true
}
if len(call.Args) != 2 {
return true
}
if lit, ok := call.Args[1].(*ast.BasicLit); !ok || lit.Value != "10" {
return true
}
matches := false
switch v := call.Args[0].(type) {
case *ast.CallExpr:
if len(v.Args) != 1 {
return true
}
ident, ok := v.Fun.(*ast.Ident)
if !ok {
return true
}
obj, ok := j.Program.Info.ObjectOf(ident).(*types.TypeName)
if !ok || obj.Parent() != types.Universe || obj.Name() != "int64" {
return true
}
switch vv := v.Args[0].(type) {
case *ast.BasicLit:
i, _ := strconv.ParseInt(vv.Value, 10, 64)
if i <= math.MaxInt32 {
matches = true
}
case *ast.Ident:
if checkConst(vv) || checkBasic(v.Args[0]) {
matches = true
}
default:
if checkBasic(v.Args[0]) {
matches = true
}
}
case *ast.BasicLit:
if v.Kind != token.INT {
return true
}
i, _ := strconv.ParseInt(v.Value, 10, 64)
if i <= math.MaxInt32 {
matches = true
}
case *ast.Ident:
if checkConstStrict(v) {
matches = true
}
}
if matches {
j.Errorf(call, "should use strconv.Itoa instead of strconv.FormatInt")
} }
return true return true
} }
@@ -1067,23 +1037,34 @@ func (c *Checker) LintFormatInt(j *lint.Job) {
} }
func (c *Checker) LintSimplerStructConversion(j *lint.Job) { func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
var skip ast.Node
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
// Do not suggest type conversion between pointers
if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND {
if lit, ok := unary.X.(*ast.CompositeLit); ok {
skip = lit
}
return true
}
if node == skip {
return true
}
lit, ok := node.(*ast.CompositeLit) lit, ok := node.(*ast.CompositeLit)
if !ok { if !ok {
return true return true
} }
typ1 := j.Program.Info.TypeOf(lit.Type) typ1, _ := j.Program.Info.TypeOf(lit.Type).(*types.Named)
if typ1 == nil { if typ1 == nil {
return true return true
} }
// FIXME support pointer to struct
s1, ok := typ1.Underlying().(*types.Struct) s1, ok := typ1.Underlying().(*types.Struct)
if !ok { if !ok {
return true return true
} }
n := s1.NumFields() var typ2 *types.Named
var typ2 types.Type
var ident *ast.Ident var ident *ast.Ident
getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) { getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) {
sel, ok := expr.(*ast.SelectorExpr) sel, ok := expr.(*ast.SelectorExpr)
@@ -1100,8 +1081,10 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
if len(lit.Elts) == 0 { if len(lit.Elts) == 0 {
return true return true
} }
if s1.NumFields() != len(lit.Elts) {
return true
}
for i, elt := range lit.Elts { for i, elt := range lit.Elts {
n--
var t types.Type var t types.Type
var id *ast.Ident var id *ast.Ident
var ok bool var ok bool
@@ -1129,21 +1112,27 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
if !ok { if !ok {
return true return true
} }
if typ2 != nil && typ2 != t { // All fields must be initialized from the same object
return true
}
if ident != nil && ident.Obj != id.Obj { if ident != nil && ident.Obj != id.Obj {
return true return true
} }
typ2 = t typ2, _ = t.(*types.Named)
if typ2 == nil {
return true
}
ident = id ident = id
} }
if n != 0 { if typ2 == nil {
return true return true
} }
if typ2 == nil { if typ1.Obj().Pkg() != typ2.Obj().Pkg() {
// Do not suggest type conversions between different
// packages. Types in different packages might only match
// by coincidence. Furthermore, if the dependency ever
// adds more fields to its type, it could break the code
// that relies on the type conversion to work.
return true return true
} }
@@ -1157,7 +1146,8 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
if !structsIdentical(s1, s2) { if !structsIdentical(s1, s2) {
return true return true
} }
j.Errorf(node, "should use type conversion instead of struct literal") j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal",
ident.Name, typ2.Obj().Name(), typ1.Obj().Name())
return true return true
} }
for _, f := range c.filterGenerated(j.Program.Files) { for _, f := range c.filterGenerated(j.Program.Files) {
@@ -1598,56 +1588,52 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) {
} }
} }
func (c *Checker) LintBlankOK(j *lint.Job) {
fn := func(node ast.Node) bool {
assign, ok := node.(*ast.AssignStmt)
if !ok {
return true
}
if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
return true
}
if !lint.IsBlank(assign.Lhs[1]) {
return true
}
switch rhs := assign.Rhs[0].(type) {
case *ast.IndexExpr:
// The type-checker should make sure that it's a map, but
// let's be safe.
if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok {
return true
}
case *ast.UnaryExpr:
if rhs.Op != token.ARROW {
return true
}
default:
return true
}
cp := *assign
cp.Lhs = cp.Lhs[0:1]
j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign))
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintRedundantBreak(j *lint.Job) { func (c *Checker) LintRedundantBreak(j *lint.Job) {
fn := func(node ast.Node) bool { fn1 := func(node ast.Node) {
clause, ok := node.(*ast.CaseClause) clause, ok := node.(*ast.CaseClause)
if !ok { if !ok {
return true return
} }
if len(clause.Body) < 2 { if len(clause.Body) < 2 {
return true return
} }
branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt) branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt)
if !ok || branch.Tok != token.BREAK || branch.Label != nil { if !ok || branch.Tok != token.BREAK || branch.Label != nil {
return true return
} }
j.Errorf(branch, "redundant break statement") j.Errorf(branch, "redundant break statement")
return
}
fn2 := func(node ast.Node) {
var ret *ast.FieldList
var body *ast.BlockStmt
switch x := node.(type) {
case *ast.FuncDecl:
ret = x.Type.Results
body = x.Body
case *ast.FuncLit:
ret = x.Type.Results
body = x.Body
default:
return
}
// if the func has results, a return can't be redundant.
// similarly, if there are no statements, there can be
// no return.
if ret != nil || body == nil || len(body.List) < 1 {
return
}
rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt)
if !ok {
return
}
// we don't need to check rst.Results as we already
// checked x.Type.Results to be nil.
j.Errorf(rst, "redundant return statement")
}
fn := func(node ast.Node) bool {
fn1(node)
fn2(node)
return true return true
} }
for _, f := range c.filterGenerated(j.Program.Files) { for _, f := range c.filterGenerated(j.Program.Files) {
@@ -1722,115 +1708,6 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) {
} }
} }
func (c *Checker) LintStringCopy(j *lint.Job) {
emptyStringLit := func(e ast.Expr) bool {
bl, ok := e.(*ast.BasicLit)
return ok && bl.Value == `""`
}
fn := func(node ast.Node) bool {
switch x := node.(type) {
case *ast.BinaryExpr: // "" + s, s + ""
if x.Op != token.ADD {
break
}
l1 := j.Program.Prog.Fset.Position(x.X.Pos()).Line
l2 := j.Program.Prog.Fset.Position(x.Y.Pos()).Line
if l1 != l2 {
break
}
var want ast.Expr
switch {
case emptyStringLit(x.X):
want = x.Y
case emptyStringLit(x.Y):
want = x.X
default:
return true
}
j.Errorf(x, "should use %s instead of %s",
j.Render(want), j.Render(x))
case *ast.CallExpr:
if j.IsCallToAST(x, "fmt.Sprint") && len(x.Args) == 1 {
// fmt.Sprint(x)
argT := j.Program.Info.TypeOf(x.Args[0])
bt, ok := argT.Underlying().(*types.Basic)
if !ok || bt.Kind() != types.String {
return true
}
if c.Implements(j, argT, "fmt.Stringer") || c.Implements(j, argT, "error") {
return true
}
j.Errorf(x, "should use %s instead of %s", j.Render(x.Args[0]), j.Render(x))
return true
}
// string([]byte(s))
bt, ok := j.Program.Info.TypeOf(x.Fun).(*types.Basic)
if !ok || bt.Kind() != types.String {
break
}
nested, ok := x.Args[0].(*ast.CallExpr)
if !ok {
break
}
st, ok := j.Program.Info.TypeOf(nested.Fun).(*types.Slice)
if !ok {
break
}
et, ok := st.Elem().(*types.Basic)
if !ok || et.Kind() != types.Byte {
break
}
xt, ok := j.Program.Info.TypeOf(nested.Args[0]).(*types.Basic)
if !ok || xt.Kind() != types.String {
break
}
j.Errorf(x, "should use %s instead of %s",
j.Render(nested.Args[0]), j.Render(x))
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintRedundantReturn(j *lint.Job) {
fn := func(node ast.Node) bool {
var ret *ast.FieldList
var body *ast.BlockStmt
switch x := node.(type) {
case *ast.FuncDecl:
ret = x.Type.Results
body = x.Body
case *ast.FuncLit:
ret = x.Type.Results
body = x.Body
default:
return true
}
// if the func has results, a return can't be redundant.
// similarly, if there are no statements, there can be
// no return.
if ret != nil || body == nil || len(body.List) < 1 {
return true
}
rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt)
if !ok {
return true
}
// we don't need to check rst.Results as we already
// checked x.Type.Results to be nil.
j.Errorf(rst, "redundant return statement")
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { func (c *Checker) LintErrorsNewSprintf(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
if !j.IsCallToAST(node, "errors.New") { if !j.IsCallToAST(node, "errors.New") {
@@ -1851,3 +1728,129 @@ func (c *Checker) LintErrorsNewSprintf(j *lint.Job) {
func (c *Checker) LintRangeStringRunes(j *lint.Job) { func (c *Checker) LintRangeStringRunes(j *lint.Job) {
sharedcheck.CheckRangeStringRunes(c.nodeFns, j) sharedcheck.CheckRangeStringRunes(c.nodeFns, j)
} }
func (c *Checker) LintNilCheckAroundRange(j *lint.Job) {
fn := func(node ast.Node) bool {
ifstmt, ok := node.(*ast.IfStmt)
if !ok {
return true
}
cond, ok := ifstmt.Cond.(*ast.BinaryExpr)
if !ok {
return true
}
if cond.Op != token.NEQ || !j.IsNil(cond.Y) || len(ifstmt.Body.List) != 1 {
return true
}
loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt)
if !ok {
return true
}
ifXIdent, ok := cond.X.(*ast.Ident)
if !ok {
return true
}
rangeXIdent, ok := loop.X.(*ast.Ident)
if !ok {
return true
}
if ifXIdent.Obj != rangeXIdent.Obj {
return true
}
switch j.Program.Info.TypeOf(rangeXIdent).(type) {
case *types.Slice, *types.Map:
j.Errorf(node, "unnecessary nil check around range")
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func isPermissibleSort(j *lint.Job, node ast.Node) bool {
call := node.(*ast.CallExpr)
typeconv, ok := call.Args[0].(*ast.CallExpr)
if !ok {
return true
}
sel, ok := typeconv.Fun.(*ast.SelectorExpr)
if !ok {
return true
}
name := j.SelectorName(sel)
switch name {
case "sort.IntSlice", "sort.Float64Slice", "sort.StringSlice":
default:
return true
}
return false
}
func (c *Checker) LintSortHelpers(j *lint.Job) {
fnFuncs := func(node ast.Node) bool {
var body *ast.BlockStmt
switch node := node.(type) {
case *ast.FuncLit:
body = node.Body
case *ast.FuncDecl:
body = node.Body
default:
return true
}
if body == nil {
return true
}
type Error struct {
node lint.Positioner
msg string
}
var errors []Error
permissible := false
fnSorts := func(node ast.Node) bool {
if permissible {
return false
}
if !j.IsCallToAST(node, "sort.Sort") {
return true
}
if isPermissibleSort(j, node) {
permissible = true
return false
}
call := node.(*ast.CallExpr)
typeconv := call.Args[0].(*ast.CallExpr)
sel := typeconv.Fun.(*ast.SelectorExpr)
name := j.SelectorName(sel)
switch name {
case "sort.IntSlice":
errors = append(errors, Error{node, "should use sort.Ints(...) instead of sort.Sort(sort.IntSlice(...))"})
case "sort.Float64Slice":
errors = append(errors, Error{node, "should use sort.Float64s(...) instead of sort.Sort(sort.Float64Slice(...))"})
case "sort.StringSlice":
errors = append(errors, Error{node, "should use sort.Strings(...) instead of sort.Sort(sort.StringSlice(...))"})
}
return true
}
ast.Inspect(body, fnSorts)
if permissible {
return false
}
for _, err := range errors {
j.Errorf(err.node, "%s", err.msg)
}
return false
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fnFuncs)
}
}

View File

@@ -0,0 +1,21 @@
package staticcheck
import (
"go/ast"
"strings"
"honnef.co/go/tools/lint"
)
func buildTags(f *ast.File) [][]string {
var out [][]string
for _, line := range strings.Split(lint.Preamble(f), "\n") {
if !strings.HasPrefix(line, "+build ") {
continue
}
line = strings.TrimSpace(strings.TrimPrefix(line, "+build "))
fields := strings.Fields(line)
out = append(out, fields)
}
return out
}

View File

@@ -4,19 +4,20 @@ package staticcheck // import "honnef.co/go/tools/staticcheck"
import ( import (
"fmt" "fmt"
"go/ast" "go/ast"
"go/build"
"go/constant" "go/constant"
"go/token" "go/token"
"go/types" "go/types"
htmltemplate "html/template" htmltemplate "html/template"
"net/http" "net/http"
"regexp"
"sort"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
texttemplate "text/template" texttemplate "text/template"
"honnef.co/go/tools/deprecated"
"honnef.co/go/tools/functions" "honnef.co/go/tools/functions"
"honnef.co/go/tools/gcsizes"
"honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/internal/sharedcheck"
"honnef.co/go/tools/lint" "honnef.co/go/tools/lint"
"honnef.co/go/tools/ssa" "honnef.co/go/tools/ssa"
@@ -110,14 +111,12 @@ var (
}, },
} }
checkSyncPoolSizeRules = map[string]CallCheck{ checkSyncPoolValueRules = map[string]CallCheck{
"(*sync.Pool).Put": func(call *Call) { "(*sync.Pool).Put": func(call *Call) {
// TODO(dh): allow users to pass in a custom build environment
sizes := gcsizes.ForArch(build.Default.GOARCH)
arg := call.Args[0] arg := call.Args[0]
typ := arg.Value.Value.Type() typ := arg.Value.Value.Type()
if !types.IsInterface(typ) && sizes.Sizeof(typ) > sizes.WordSize { if !lint.IsPointerLike(typ) {
arg.Invalid("argument should be one word large or less to avoid allocations") arg.Invalid("argument should be pointer-like to avoid allocations")
} }
}, },
} }
@@ -151,6 +150,7 @@ var (
checkUnmarshalPointerRules = map[string]CallCheck{ checkUnmarshalPointerRules = map[string]CallCheck{
"encoding/xml.Unmarshal": unmarshalPointer("xml.Unmarshal", 1), "encoding/xml.Unmarshal": unmarshalPointer("xml.Unmarshal", 1),
"(*encoding/xml.Decoder).Decode": unmarshalPointer("Decode", 0), "(*encoding/xml.Decoder).Decode": unmarshalPointer("Decode", 0),
"(*encoding/xml.Decoder).DecodeElement": unmarshalPointer("DecodeElement", 0),
"encoding/json.Unmarshal": unmarshalPointer("json.Unmarshal", 1), "encoding/json.Unmarshal": unmarshalPointer("json.Unmarshal", 1),
"(*encoding/json.Decoder).Decode": unmarshalPointer("Decode", 0), "(*encoding/json.Decoder).Decode": unmarshalPointer("Decode", 0),
} }
@@ -208,6 +208,9 @@ func NewChecker() *Checker {
return &Checker{} return &Checker{}
} }
func (*Checker) Name() string { return "staticcheck" }
func (*Checker) Prefix() string { return "SA" }
func (c *Checker) Funcs() map[string]lint.Func { func (c *Checker) Funcs() map[string]lint.Func {
return map[string]lint.Func{ return map[string]lint.Func{
"SA1000": c.callChecker(checkRegexpRules), "SA1000": c.callChecker(checkRegexpRules),
@@ -232,7 +235,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA1019": c.CheckDeprecated, "SA1019": c.CheckDeprecated,
"SA1020": c.callChecker(checkListenAddressRules), "SA1020": c.callChecker(checkListenAddressRules),
"SA1021": c.callChecker(checkBytesEqualIPRules), "SA1021": c.callChecker(checkBytesEqualIPRules),
"SA1022": c.CheckFlagUsage, "SA1022": nil,
"SA1023": c.CheckWriterBufferModified, "SA1023": c.CheckWriterBufferModified,
"SA1024": c.callChecker(checkUniqueCutsetRules), "SA1024": c.callChecker(checkUniqueCutsetRules),
@@ -249,7 +252,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA4002": c.CheckDiffSizeComparison, "SA4002": c.CheckDiffSizeComparison,
"SA4003": c.CheckUnsignedComparison, "SA4003": c.CheckUnsignedComparison,
"SA4004": c.CheckIneffectiveLoop, "SA4004": c.CheckIneffectiveLoop,
"SA4005": c.CheckIneffecitiveFieldAssignments, "SA4005": c.CheckIneffectiveFieldAssignments,
"SA4006": c.CheckUnreadVariableValues, "SA4006": c.CheckUnreadVariableValues,
// "SA4007": c.CheckPredeterminedBooleanExprs, // "SA4007": c.CheckPredeterminedBooleanExprs,
"SA4007": nil, "SA4007": nil,
@@ -263,6 +266,8 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA4015": c.callChecker(checkMathIntRules), "SA4015": c.callChecker(checkMathIntRules),
"SA4016": c.CheckSillyBitwiseOps, "SA4016": c.CheckSillyBitwiseOps,
"SA4017": c.CheckPureFunctions, "SA4017": c.CheckPureFunctions,
"SA4018": c.CheckSelfAssignment,
"SA4019": c.CheckDuplicateBuildConstraints,
"SA5000": c.CheckNilMaps, "SA5000": c.CheckNilMaps,
"SA5001": c.CheckEarlyDefer, "SA5001": c.CheckEarlyDefer,
@@ -275,13 +280,15 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA6000": c.callChecker(checkRegexpMatchLoopRules), "SA6000": c.callChecker(checkRegexpMatchLoopRules),
"SA6001": c.CheckMapBytesKey, "SA6001": c.CheckMapBytesKey,
"SA6002": c.callChecker(checkSyncPoolSizeRules), "SA6002": c.callChecker(checkSyncPoolValueRules),
"SA6003": c.CheckRangeStringRunes, "SA6003": c.CheckRangeStringRunes,
"SA6004": nil,
"SA9000": nil, "SA9000": nil,
"SA9001": c.CheckDubiousDeferInChannelRangeLoop, "SA9001": c.CheckDubiousDeferInChannelRangeLoop,
"SA9002": c.CheckNonOctalFileMode, "SA9002": c.CheckNonOctalFileMode,
"SA9003": c.CheckEmptyBranch, "SA9003": c.CheckEmptyBranch,
"SA9004": c.CheckMissingEnumTypesInDeclaration,
} }
} }
@@ -298,36 +305,62 @@ func (c *Checker) filterGenerated(files []*ast.File) []*ast.File {
return out return out
} }
func (c *Checker) Init(prog *lint.Program) { func (c *Checker) deprecateObject(m map[types.Object]string, prog *lint.Program, obj types.Object) {
c.funcDescs = functions.NewDescriptions(prog.SSA) if obj.Pkg() == nil {
c.deprecatedObjs = map[types.Object]string{} return
c.nodeFns = map[ast.Node]*ssa.Function{} }
f := prog.File(obj)
if f == nil {
return
}
msg := c.deprecationMessage(f, prog.Prog.Fset, obj)
if msg != "" {
m[obj] = msg
}
}
func (c *Checker) Init(prog *lint.Program) {
wg := &sync.WaitGroup{}
wg.Add(3)
go func() {
c.funcDescs = functions.NewDescriptions(prog.SSA)
for _, fn := range prog.AllFunctions { for _, fn := range prog.AllFunctions {
if fn.Blocks != nil { if fn.Blocks != nil {
applyStdlibKnowledge(fn) applyStdlibKnowledge(fn)
ssa.OptimizeBlocks(fn) ssa.OptimizeBlocks(fn)
} }
} }
wg.Done()
}()
go func() {
c.nodeFns = lint.NodeFns(prog.Packages) c.nodeFns = lint.NodeFns(prog.Packages)
wg.Done()
}()
deprecated := []map[types.Object]string{} go func() {
wg := &sync.WaitGroup{} c.deprecatedObjs = map[types.Object]string{}
for _, pkginfo := range prog.Prog.AllPackages { for _, ssapkg := range prog.SSA.AllPackages() {
pkginfo := pkginfo ssapkg := ssapkg
scope := pkginfo.Pkg.Scope() for _, member := range ssapkg.Members {
names := scope.Names() obj := member.Object()
wg.Add(1) if obj == nil {
continue
}
c.deprecateObject(c.deprecatedObjs, prog, obj)
if typ, ok := obj.Type().(*types.Named); ok {
for i := 0; i < typ.NumMethods(); i++ {
meth := typ.Method(i)
c.deprecateObject(c.deprecatedObjs, prog, meth)
}
m := map[types.Object]string{} if iface, ok := typ.Underlying().(*types.Interface); ok {
deprecated = append(deprecated, m) for i := 0; i < iface.NumExplicitMethods(); i++ {
go func(m map[types.Object]string) { meth := iface.ExplicitMethod(i)
for _, name := range names { c.deprecateObject(c.deprecatedObjs, prog, meth)
obj := scope.Lookup(name) }
msg := c.deprecationMessage(pkginfo.Files, prog.SSA.Fset, obj) }
if msg != "" {
m[obj] = msg
} }
if typ, ok := obj.Type().Underlying().(*types.Struct); ok { if typ, ok := obj.Type().Underlying().(*types.Struct); ok {
n := typ.NumFields() n := typ.NumFields()
@@ -335,51 +368,20 @@ func (c *Checker) Init(prog *lint.Program) {
// FIXME(dh): This code will not find deprecated // FIXME(dh): This code will not find deprecated
// fields in anonymous structs. // fields in anonymous structs.
field := typ.Field(i) field := typ.Field(i)
msg := c.deprecationMessage(pkginfo.Files, prog.SSA.Fset, field) c.deprecateObject(c.deprecatedObjs, prog, field)
if msg != "" {
m[field] = msg
} }
} }
} }
} }
wg.Done() wg.Done()
}(m) }()
}
wg.Wait() wg.Wait()
for _, m := range deprecated {
for k, v := range m {
c.deprecatedObjs[k] = v
}
}
} }
// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos) func (c *Checker) deprecationMessage(file *ast.File, fset *token.FileSet, obj types.Object) (message string) {
func tokenFileContainsPos(f *token.File, pos token.Pos) bool { pos := obj.Pos()
p := int(pos) path, _ := astutil.PathEnclosingInterval(file, pos, pos)
base := f.Base()
return base <= p && p < base+f.Size()
}
func pathEnclosingInterval(files []*ast.File, fset *token.FileSet, start, end token.Pos) (path []ast.Node, exact bool) {
for _, f := range files {
if f.Pos() == token.NoPos {
// This can happen if the parser saw
// too many errors and bailed out.
// (Use parser.AllErrors to prevent that.)
continue
}
if !tokenFileContainsPos(fset.File(f.Pos()), start) {
continue
}
if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
return path, exact
}
}
return nil, false
}
func (c *Checker) deprecationMessage(files []*ast.File, fset *token.FileSet, obj types.Object) (message string) {
path, _ := pathEnclosingInterval(files, fset, obj.Pos(), obj.Pos())
if len(path) <= 2 { if len(path) <= 2 {
return "" return ""
} }
@@ -1137,13 +1139,20 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) {
} }
} }
// cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't
// want to flag.
var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`)
func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { func (c *Checker) CheckIneffectiveCopy(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
if unary, ok := node.(*ast.UnaryExpr); ok { if unary, ok := node.(*ast.UnaryExpr); ok {
if _, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND {
ident, ok := star.X.(*ast.Ident)
if !ok || !cgoIdent.MatchString(ident.Name) {
j.Errorf(unary, "&*x will be simplified to x. It will not copy x.") j.Errorf(unary, "&*x will be simplified to x. It will not copy x.")
} }
} }
}
if star, ok := node.(*ast.StarExpr); ok { if star, ok := node.(*ast.StarExpr); ok {
if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND {
@@ -1254,7 +1263,7 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) {
} }
} }
func (c *Checker) CheckIneffecitiveFieldAssignments(j *lint.Job) { func (c *Checker) CheckIneffectiveFieldAssignments(j *lint.Job) {
for _, ssafn := range j.Program.InitialFunctions { for _, ssafn := range j.Program.InitialFunctions {
// fset := j.Program.SSA.Fset // fset := j.Program.SSA.Fset
// if fset.File(f.File.Pos()) != fset.File(ssafn.Pos()) { // if fset.File(f.File.Pos()) != fset.File(ssafn.Pos()) {
@@ -2055,7 +2064,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) {
} }
for _, b := range mc.Bindings { for _, b := range mc.Bindings {
if b == v { if b == v {
pos := j.Program.SSA.Fset.Position(mc.Fn.Pos()) pos := j.Program.DisplayPosition(mc.Fn.Pos())
j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos)
} }
} }
@@ -2156,6 +2165,11 @@ func (c *Checker) CheckInfiniteRecursion(j *lint.Job) {
if edge.Callee != node { if edge.Callee != node {
continue continue
} }
if _, ok := edge.Site.(*ssa.Go); ok {
// Recursively spawning goroutines doesn't consume
// stack space infinitely, so don't flag it.
continue
}
block := edge.Site.Block() block := edge.Site.Block()
canReturn := false canReturn := false
@@ -2427,7 +2441,7 @@ fnLoop:
if callee == nil { if callee == nil {
continue continue
} }
if c.funcDescs.Get(callee).Pure { if c.funcDescs.Get(callee).Pure && !c.funcDescs.Get(callee).Stub {
j.Errorf(ins, "%s is a pure function but its return value is ignored", callee.Name()) j.Errorf(ins, "%s is a pure function but its return value is ignored", callee.Name())
continue continue
} }
@@ -2436,22 +2450,6 @@ fnLoop:
} }
} }
func enclosingFunction(j *lint.Job, node ast.Node) *ast.FuncDecl {
f := j.File(node)
path, _ := astutil.PathEnclosingInterval(f, node.Pos(), node.Pos())
for _, e := range path {
fn, ok := e.(*ast.FuncDecl)
if !ok {
continue
}
if fn.Name == nil {
continue
}
return fn
}
return nil
}
func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) {
obj := j.Program.Info.ObjectOf(ident) obj := j.Program.Info.ObjectOf(ident)
if obj.Pkg() == nil { if obj.Pkg() == nil {
@@ -2461,19 +2459,23 @@ func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) {
return alt != "", alt return alt != "", alt
} }
func (c *Checker) enclosingFunc(sel *ast.SelectorExpr) *ssa.Function {
fn := c.nodeFns[sel]
if fn == nil {
return nil
}
for fn.Parent() != nil {
fn = fn.Parent()
}
return fn
}
func (c *Checker) CheckDeprecated(j *lint.Job) { func (c *Checker) CheckDeprecated(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
sel, ok := node.(*ast.SelectorExpr) sel, ok := node.(*ast.SelectorExpr)
if !ok { if !ok {
return true return true
} }
if fn := enclosingFunction(j, sel); fn != nil {
if ok, _ := c.isDeprecated(j, fn.Name); ok {
// functions that are deprecated may use deprecated
// symbols
return true
}
}
obj := j.Program.Info.ObjectOf(sel.Sel) obj := j.Program.Info.ObjectOf(sel.Sel)
if obj.Pkg() == nil { if obj.Pkg() == nil {
@@ -2485,6 +2487,24 @@ func (c *Checker) CheckDeprecated(j *lint.Job) {
return true return true
} }
if ok, alt := c.isDeprecated(j, sel.Sel); ok { if ok, alt := c.isDeprecated(j, sel.Sel); ok {
// Look for the first available alternative, not the first
// version something was deprecated in. If a function was
// deprecated in Go 1.6, an alternative has been available
// already in 1.0, and we're targetting 1.2, it still
// makes sense to use the alternative from 1.0, to be
// future-proof.
minVersion := deprecated.Stdlib[j.SelectorName(sel)].AlternativeAvailableSince
if !j.IsGoVersion(minVersion) {
return true
}
if fn := c.enclosingFunc(sel); fn != nil {
if _, ok := c.deprecatedObjs[fn.Object()]; ok {
// functions that are deprecated may use deprecated
// symbols
return true
}
}
j.Errorf(sel, "%s is deprecated: %s", j.Render(sel), alt) j.Errorf(sel, "%s is deprecated: %s", j.Render(sel), alt)
return true return true
} }
@@ -2558,46 +2578,6 @@ func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) {
} }
} }
func (c *Checker) CheckFlagUsage(j *lint.Job) {
for _, ssafn := range j.Program.InitialFunctions {
for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
store, ok := ins.(*ssa.Store)
if !ok {
continue
}
switch addr := store.Addr.(type) {
case *ssa.FieldAddr:
typ := addr.X.Type()
st := deref(typ).Underlying().(*types.Struct)
if types.TypeString(typ, nil) != "*flag.FlagSet" {
continue
}
if st.Field(addr.Field).Name() != "Usage" {
continue
}
case *ssa.Global:
if addr.Pkg.Pkg.Path() != "flag" || addr.Name() != "Usage" {
continue
}
default:
continue
}
fn := unwrapFunction(store.Val)
if fn == nil {
continue
}
for _, oblock := range fn.Blocks {
if hasCallTo(oblock, "os.Exit") {
j.Errorf(store, "the function assigned to Usage shouldn't call os.Exit, but it does")
break
}
}
}
}
}
}
func unwrapFunction(val ssa.Value) *ssa.Function { func unwrapFunction(val ssa.Value) *ssa.Function {
switch val := val.(type) { switch val := val.(type) {
case *ssa.Function: case *ssa.Function:
@@ -2791,3 +2771,116 @@ func (c *Checker) CheckMapBytesKey(j *lint.Job) {
func (c *Checker) CheckRangeStringRunes(j *lint.Job) { func (c *Checker) CheckRangeStringRunes(j *lint.Job) {
sharedcheck.CheckRangeStringRunes(c.nodeFns, j) sharedcheck.CheckRangeStringRunes(c.nodeFns, j)
} }
func (c *Checker) CheckSelfAssignment(j *lint.Job) {
fn := func(node ast.Node) bool {
assign, ok := node.(*ast.AssignStmt)
if !ok {
return true
}
if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) {
return true
}
for i, stmt := range assign.Lhs {
rlh := j.Render(stmt)
rrh := j.Render(assign.Rhs[i])
if rlh == rrh {
j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh)
}
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func buildTagsIdentical(s1, s2 []string) bool {
if len(s1) != len(s2) {
return false
}
s1s := make([]string, len(s1))
copy(s1s, s1)
sort.Strings(s1s)
s2s := make([]string, len(s2))
copy(s2s, s2)
sort.Strings(s2s)
for i, s := range s1s {
if s != s2s[i] {
return false
}
}
return true
}
func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) {
for _, f := range c.filterGenerated(job.Program.Files) {
constraints := buildTags(f)
for i, constraint1 := range constraints {
for j, constraint2 := range constraints {
if i >= j {
continue
}
if buildTagsIdentical(constraint1, constraint2) {
job.Errorf(f, "identical build constraints %q and %q",
strings.Join(constraint1, " "),
strings.Join(constraint2, " "))
}
}
}
}
}
func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) {
fn := func(node ast.Node) bool {
decl, ok := node.(*ast.GenDecl)
if !ok {
return true
}
if !decl.Lparen.IsValid() {
// not a parenthesised gendecl
//
// TODO(dh): do we need this check, considering we require
// decl.Specs to contain 2+ elements?
return true
}
if decl.Tok != token.CONST {
return true
}
if len(decl.Specs) < 2 {
return true
}
if decl.Specs[0].(*ast.ValueSpec).Type == nil {
// first constant doesn't have a type
return true
}
for i, spec := range decl.Specs {
spec := spec.(*ast.ValueSpec)
if len(spec.Names) != 1 || len(spec.Values) != 1 {
return true
}
switch v := spec.Values[0].(type) {
case *ast.BasicLit:
case *ast.UnaryExpr:
if _, ok := v.X.(*ast.BasicLit); !ok {
return true
}
default:
// if it's not a literal it might be typed, such as
// time.Microsecond = 1000 * Nanosecond
return true
}
if i == 0 {
continue
}
if spec.Type != nil {
return true
}
}
j.Errorf(decl, "only the first constant has an explicit type")
return true
}
for _, f := range j.Program.Files {
ast.Inspect(f, fn)
}
}

View File

@@ -26,6 +26,9 @@ type LintChecker struct {
c *Checker c *Checker
} }
func (*LintChecker) Name() string { return "unused" }
func (*LintChecker) Prefix() string { return "U" }
func (l *LintChecker) Init(*lint.Program) {} func (l *LintChecker) Init(*lint.Program) {}
func (l *LintChecker) Funcs() map[string]lint.Func { func (l *LintChecker) Funcs() map[string]lint.Func {
return map[string]lint.Func{ return map[string]lint.Func{
@@ -275,6 +278,51 @@ func (c *Checker) Check(lprog *loader.Program) []Unused {
return unused return unused
} }
// isNoCopyType reports whether a type represents the NoCopy sentinel
// type. The NoCopy type is a named struct with no fields and exactly
// one method `func Lock()` that is empty.
//
// FIXME(dh): currently we're not checking that the function body is
// empty.
func isNoCopyType(typ types.Type) bool {
st, ok := typ.Underlying().(*types.Struct)
if !ok {
return false
}
if st.NumFields() != 0 {
return false
}
named, ok := typ.(*types.Named)
if !ok {
return false
}
if named.NumMethods() != 1 {
return false
}
meth := named.Method(0)
if meth.Name() != "Lock" {
return false
}
sig := meth.Type().(*types.Signature)
if sig.Params().Len() != 0 || sig.Results().Len() != 0 {
return false
}
return true
}
func (c *Checker) useNoCopyFields(typ types.Type) {
if st, ok := typ.Underlying().(*types.Struct); ok {
n := st.NumFields()
for i := 0; i < n; i++ {
field := st.Field(i)
if isNoCopyType(field.Type()) {
c.graph.markUsedBy(field, typ)
}
}
}
}
func (c *Checker) useExportedFields(typ types.Type) { func (c *Checker) useExportedFields(typ types.Type) {
if st, ok := typ.Underlying().(*types.Struct); ok { if st, ok := typ.Underlying().(*types.Struct); ok {
n := st.NumFields() n := st.NumFields()
@@ -485,6 +533,7 @@ func (c *Checker) processTypes(pkg *loader.PackageInfo) {
interfaces = append(interfaces, obj) interfaces = append(interfaces, obj)
} }
case *types.Struct: case *types.Struct:
c.useNoCopyFields(obj)
if pkg.Pkg.Name() != "main" && !c.WholeProgram { if pkg.Pkg.Name() != "main" && !c.WholeProgram {
c.useExportedFields(obj) c.useExportedFields(obj)
} }

View File

@@ -0,0 +1,17 @@
package version
import (
"fmt"
"os"
"path/filepath"
)
const Version = "devel"
func Print() {
if Version == "devel" {
fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0]))
} else {
fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), Version)
}
}

View File

@@ -0,0 +1,27 @@
Copyright (c) 2015, Daniel Martí. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,58 @@
# interfacer
[![GoDoc](https://godoc.org/mvdan.cc/interfacer?status.svg)](https://godoc.org/mvdan.cc/interfacer)
[![Build Status](https://travis-ci.org/mvdan/interfacer.svg?branch=master)](https://travis-ci.org/mvdan/interfacer)
A linter that suggests interface types. In other words, it warns about
the usage of types that are more specific than necessary.
go get -u mvdan.cc/interfacer
### Usage
```go
func ProcessInput(f *os.File) error {
b, err := ioutil.ReadAll(f)
if err != nil {
return err
}
return processBytes(b)
}
```
```sh
$ interfacer $(go list ./... | grep -v /vendor/)
foo.go:10:19: f can be io.Reader
```
### Basic idea
This tool inspects the parameters of your functions to see if they fit
an interface type that is less specific than the current type.
The example above illustrates this point. Overly specific interfaces
also trigger a warning - if `f` were an `io.ReadCloser`, the same
message would appear.
It suggests interface types defined both in the func's package and the
package's imports (two levels; direct imports and their direct imports).
### False positives
To avoid false positives, it never does any suggestions on functions
that may be implementing an interface method or a named function type.
It also skips parameters passed by value (excluding pointers and
interfaces) on unexported functions, since that would introduce extra
allocations where they are usually not worth the tradeoff.
### Suppressing warnings
If a suggestion is technically correct but doesn't make sense, you can
still suppress the warning by mentioning the type in the function name:
```go
func ProcessInputFile(f *os.File) error {
// use as an io.Reader
}
```

View File

@@ -0,0 +1,50 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package check
import (
"go/ast"
"go/types"
)
type pkgTypes struct {
ifaces map[string]string
funcSigns map[string]bool
}
func (p *pkgTypes) getTypes(pkg *types.Package) {
p.ifaces = make(map[string]string)
p.funcSigns = make(map[string]bool)
done := make(map[*types.Package]bool)
addTypes := func(pkg *types.Package, top bool) {
if done[pkg] {
return
}
done[pkg] = true
ifs, funs := fromScope(pkg.Scope())
fullName := func(name string) string {
if !top {
return pkg.Path() + "." + name
}
return name
}
for iftype, name := range ifs {
// only suggest exported interfaces
if ast.IsExported(name) {
p.ifaces[iftype] = fullName(name)
}
}
for ftype := range funs {
// ignore non-exported func signatures too
p.funcSigns[ftype] = true
}
}
for _, imp := range pkg.Imports() {
addTypes(imp, false)
for _, imp2 := range imp.Imports() {
addTypes(imp2, false)
}
}
addTypes(pkg, true)
}

View File

@@ -0,0 +1,462 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package check // import "mvdan.cc/interfacer/check"
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"os"
"strings"
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
"github.com/kisielk/gotool"
"mvdan.cc/lint"
)
func toDiscard(usage *varUsage) bool {
if usage.discard {
return true
}
for to := range usage.assigned {
if toDiscard(to) {
return true
}
}
return false
}
func allCalls(usage *varUsage, all, ftypes map[string]string) {
for fname := range usage.calls {
all[fname] = ftypes[fname]
}
for to := range usage.assigned {
allCalls(to, all, ftypes)
}
}
func (c *Checker) interfaceMatching(param *types.Var, usage *varUsage) (string, string) {
if toDiscard(usage) {
return "", ""
}
ftypes := typeFuncMap(param.Type())
called := make(map[string]string, len(usage.calls))
allCalls(usage, called, ftypes)
s := funcMapString(called)
return c.ifaces[s], s
}
type varUsage struct {
calls map[string]struct{}
discard bool
assigned map[*varUsage]struct{}
}
type funcDecl struct {
astDecl *ast.FuncDecl
ssaFn *ssa.Function
}
// CheckArgs checks the packages specified by their import paths in
// args.
func CheckArgs(args []string) ([]string, error) {
paths := gotool.ImportPaths(args)
conf := loader.Config{}
conf.AllowErrors = true
rest, err := conf.FromArgs(paths, false)
if err != nil {
return nil, err
}
if len(rest) > 0 {
return nil, fmt.Errorf("unwanted extra args: %v", rest)
}
lprog, err := conf.Load()
if err != nil {
return nil, err
}
prog := ssautil.CreateProgram(lprog, 0)
prog.Build()
c := new(Checker)
c.Program(lprog)
c.ProgramSSA(prog)
issues, err := c.Check()
if err != nil {
return nil, err
}
wd, err := os.Getwd()
if err != nil {
return nil, err
}
lines := make([]string, len(issues))
for i, issue := range issues {
fpos := prog.Fset.Position(issue.Pos()).String()
if strings.HasPrefix(fpos, wd) {
fpos = fpos[len(wd)+1:]
}
lines[i] = fmt.Sprintf("%s: %s", fpos, issue.Message())
}
return lines, nil
}
type Checker struct {
lprog *loader.Program
prog *ssa.Program
pkgTypes
*loader.PackageInfo
funcs []*funcDecl
ssaByPos map[token.Pos]*ssa.Function
discardFuncs map[*types.Signature]struct{}
vars map[*types.Var]*varUsage
}
var (
_ lint.Checker = (*Checker)(nil)
_ lint.WithSSA = (*Checker)(nil)
)
// Program implements lint.Checker by storing the loaded program for a
// later Check call.
func (c *Checker) Program(lprog *loader.Program) {
	c.lprog = lprog
}

// ProgramSSA implements lint.WithSSA by storing the SSA form of the
// program for a later Check call.
func (c *Checker) ProgramSSA(prog *ssa.Program) {
	c.prog = prog
}
func (c *Checker) Check() ([]lint.Issue, error) {
var total []lint.Issue
c.ssaByPos = make(map[token.Pos]*ssa.Function)
wantPkg := make(map[*types.Package]bool)
for _, pinfo := range c.lprog.InitialPackages() {
wantPkg[pinfo.Pkg] = true
}
for fn := range ssautil.AllFunctions(c.prog) {
if fn.Pkg == nil { // builtin?
continue
}
if len(fn.Blocks) == 0 { // stub
continue
}
if !wantPkg[fn.Pkg.Pkg] { // not part of given pkgs
continue
}
c.ssaByPos[fn.Pos()] = fn
}
for _, pinfo := range c.lprog.InitialPackages() {
pkg := pinfo.Pkg
c.getTypes(pkg)
c.PackageInfo = c.lprog.AllPackages[pkg]
total = append(total, c.checkPkg()...)
}
return total, nil
}
func (c *Checker) checkPkg() []lint.Issue {
c.discardFuncs = make(map[*types.Signature]struct{})
c.vars = make(map[*types.Var]*varUsage)
c.funcs = c.funcs[:0]
findFuncs := func(node ast.Node) bool {
decl, ok := node.(*ast.FuncDecl)
if !ok {
return true
}
ssaFn := c.ssaByPos[decl.Name.Pos()]
if ssaFn == nil {
return true
}
fd := &funcDecl{
astDecl: decl,
ssaFn: ssaFn,
}
if c.funcSigns[signString(fd.ssaFn.Signature)] {
// implements interface
return true
}
c.funcs = append(c.funcs, fd)
ast.Walk(c, decl.Body)
return true
}
for _, f := range c.Files {
ast.Inspect(f, findFuncs)
}
return c.packageIssues()
}
func paramVarAndType(sign *types.Signature, i int) (*types.Var, types.Type) {
params := sign.Params()
extra := sign.Variadic() && i >= params.Len()-1
if !extra {
if i >= params.Len() {
// builtins with multiple signatures
return nil, nil
}
vr := params.At(i)
return vr, vr.Type()
}
last := params.At(params.Len() - 1)
switch x := last.Type().(type) {
case *types.Slice:
return nil, x.Elem()
default:
return nil, x
}
}
func (c *Checker) varUsage(e ast.Expr) *varUsage {
id, ok := e.(*ast.Ident)
if !ok {
return nil
}
param, ok := c.ObjectOf(id).(*types.Var)
if !ok {
// not a variable
return nil
}
if usage, e := c.vars[param]; e {
return usage
}
if !interesting(param.Type()) {
return nil
}
usage := &varUsage{
calls: make(map[string]struct{}),
assigned: make(map[*varUsage]struct{}),
}
c.vars[param] = usage
return usage
}
func (c *Checker) addUsed(e ast.Expr, as types.Type) {
if as == nil {
return
}
if usage := c.varUsage(e); usage != nil {
// using variable
iface, ok := as.Underlying().(*types.Interface)
if !ok {
usage.discard = true
return
}
for i := 0; i < iface.NumMethods(); i++ {
m := iface.Method(i)
usage.calls[m.Name()] = struct{}{}
}
} else if t, ok := c.TypeOf(e).(*types.Signature); ok {
// using func
c.discardFuncs[t] = struct{}{}
}
}
// addAssign records that the variable behind from flows into the
// variable behind to, so method calls made through to also count
// against from when matching interfaces.
func (c *Checker) addAssign(to, from ast.Expr) {
	dst, src := c.varUsage(to), c.varUsage(from)
	if dst == nil || src == nil {
		// either side isn't a tracked (interesting) variable
		return
	}
	src.assigned[dst] = struct{}{}
}
// discard marks e's variable, if tracked, as unsuitable for an
// interface suggestion.
func (c *Checker) discard(e ast.Expr) {
	usage := c.varUsage(e)
	if usage == nil {
		return
	}
	usage.discard = true
}
// comparedWith discards e when it is compared against a basic literal:
// such comparisons tie the variable to its concrete type.
func (c *Checker) comparedWith(e, with ast.Expr) {
	if _, isLit := with.(*ast.BasicLit); isLit {
		c.discard(e)
	}
}
func (c *Checker) Visit(node ast.Node) ast.Visitor {
switch x := node.(type) {
case *ast.SelectorExpr:
if _, ok := c.TypeOf(x.Sel).(*types.Signature); !ok {
c.discard(x.X)
}
case *ast.StarExpr:
c.discard(x.X)
case *ast.UnaryExpr:
c.discard(x.X)
case *ast.IndexExpr:
c.discard(x.X)
case *ast.IncDecStmt:
c.discard(x.X)
case *ast.BinaryExpr:
switch x.Op {
case token.EQL, token.NEQ:
c.comparedWith(x.X, x.Y)
c.comparedWith(x.Y, x.X)
default:
c.discard(x.X)
c.discard(x.Y)
}
case *ast.ValueSpec:
for _, val := range x.Values {
c.addUsed(val, c.TypeOf(x.Type))
}
case *ast.AssignStmt:
for i, val := range x.Rhs {
left := x.Lhs[i]
if x.Tok == token.ASSIGN {
c.addUsed(val, c.TypeOf(left))
}
c.addAssign(left, val)
}
case *ast.CompositeLit:
for i, e := range x.Elts {
switch y := e.(type) {
case *ast.KeyValueExpr:
c.addUsed(y.Key, c.TypeOf(y.Value))
c.addUsed(y.Value, c.TypeOf(y.Key))
case *ast.Ident:
c.addUsed(y, compositeIdentType(c.TypeOf(x), i))
}
}
case *ast.CallExpr:
switch y := c.TypeOf(x.Fun).Underlying().(type) {
case *types.Signature:
c.onMethodCall(x, y)
default:
// type conversion
if len(x.Args) == 1 {
c.addUsed(x.Args[0], y)
}
}
}
return c
}
func compositeIdentType(t types.Type, i int) types.Type {
switch x := t.(type) {
case *types.Named:
return compositeIdentType(x.Underlying(), i)
case *types.Struct:
return x.Field(i).Type()
case *types.Array:
return x.Elem()
case *types.Slice:
return x.Elem()
}
return nil
}
func (c *Checker) onMethodCall(ce *ast.CallExpr, sign *types.Signature) {
for i, e := range ce.Args {
paramObj, t := paramVarAndType(sign, i)
// Don't if this is a parameter being re-used as itself
// in a recursive call
if id, ok := e.(*ast.Ident); ok {
if paramObj == c.ObjectOf(id) {
continue
}
}
c.addUsed(e, t)
}
sel, ok := ce.Fun.(*ast.SelectorExpr)
if !ok {
return
}
// receiver func call on the left side
if usage := c.varUsage(sel.X); usage != nil {
usage.calls[sel.Sel.Name] = struct{}{}
}
}
func (fd *funcDecl) paramGroups() [][]*types.Var {
astList := fd.astDecl.Type.Params.List
groups := make([][]*types.Var, len(astList))
signIndex := 0
for i, field := range astList {
group := make([]*types.Var, len(field.Names))
for j := range field.Names {
group[j] = fd.ssaFn.Signature.Params().At(signIndex)
signIndex++
}
groups[i] = group
}
return groups
}
func (c *Checker) packageIssues() []lint.Issue {
var issues []lint.Issue
for _, fd := range c.funcs {
if _, e := c.discardFuncs[fd.ssaFn.Signature]; e {
continue
}
for _, group := range fd.paramGroups() {
issues = append(issues, c.groupIssues(fd, group)...)
}
}
return issues
}
// Issue is a single interfacer warning; it implements lint.Issue.
type Issue struct {
	pos token.Pos // position of the parameter the warning is about
	msg string    // human-readable warning text
}

// Pos returns the position the warning refers to.
func (i Issue) Pos() token.Pos { return i.pos }

// Message returns the human-readable warning text.
func (i Issue) Message() string { return i.msg }
func (c *Checker) groupIssues(fd *funcDecl, group []*types.Var) []lint.Issue {
var issues []lint.Issue
for _, param := range group {
usage := c.vars[param]
if usage == nil {
return nil
}
newType := c.paramNewType(fd.astDecl.Name.Name, param, usage)
if newType == "" {
return nil
}
issues = append(issues, Issue{
pos: param.Pos(),
msg: fmt.Sprintf("%s can be %s", param.Name(), newType),
})
}
return issues
}
func willAddAllocation(t types.Type) bool {
switch t.Underlying().(type) {
case *types.Pointer, *types.Interface:
return false
}
return true
}
func (c *Checker) paramNewType(funcName string, param *types.Var, usage *varUsage) string {
t := param.Type()
if !ast.IsExported(funcName) && willAddAllocation(t) {
return ""
}
if named := typeNamed(t); named != nil {
tname := named.Obj().Name()
vname := param.Name()
if mentionsName(funcName, tname) || mentionsName(funcName, vname) {
return ""
}
}
ifname, iftype := c.interfaceMatching(param, usage)
if ifname == "" {
return ""
}
if types.IsInterface(t.Underlying()) {
if have := funcMapString(typeFuncMap(t)); have == iftype {
return ""
}
}
return ifname
}

View File

@@ -0,0 +1,170 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package check
import (
"bytes"
"fmt"
"go/types"
"sort"
"strings"
)
// methoder abstracts the method-set accessors shared by *types.Named
// and *types.Interface.
type methoder interface {
	NumMethods() int
	Method(int) *types.Func
}

// methoderFuncMap maps each exported method name of m to its canonical
// signature string. When skip is true, unexported methods are silently
// ignored; otherwise encountering one makes the whole result nil.
func methoderFuncMap(m methoder, skip bool) map[string]string {
	out := make(map[string]string, m.NumMethods())
	for i, n := 0, m.NumMethods(); i < n; i++ {
		mth := m.Method(i)
		if !mth.Exported() {
			if !skip {
				return nil
			}
			continue
		}
		out[mth.Name()] = signString(mth.Type().(*types.Signature))
	}
	return out
}
// typeFuncMap returns the name->signature map of the methods reachable
// on t, unwrapping any pointers. Named non-interface types skip their
// unexported methods; interfaces require every method to be exported.
// All other types yield nil.
func typeFuncMap(t types.Type) map[string]string {
	for {
		ptr, ok := t.(*types.Pointer)
		if !ok {
			break
		}
		t = ptr.Elem()
	}
	if iface, ok := t.(*types.Interface); ok {
		return methoderFuncMap(iface, false)
	}
	named, ok := t.(*types.Named)
	if !ok {
		return nil
	}
	if u := named.Underlying(); types.IsInterface(u) {
		return typeFuncMap(u)
	}
	return methoderFuncMap(named, true)
}
// funcMapString encodes a method name->signature map into one
// canonical string, entries sorted by name and joined with "; ".
// The result is used as the lookup key when matching a variable's
// observed method set against known interfaces.
func funcMapString(iface map[string]string) string {
	names := make([]string, 0, len(iface))
	for name := range iface {
		names = append(names, name)
	}
	sort.Strings(names)
	var sb strings.Builder
	for i, name := range names {
		if i > 0 {
			sb.WriteString("; ")
		}
		sb.WriteString(name)
		sb.WriteString(iface[name])
	}
	return sb.String()
}
func tupleJoin(buf *bytes.Buffer, t *types.Tuple) {
buf.WriteByte('(')
for i := 0; i < t.Len(); i++ {
if i > 0 {
buf.WriteString(", ")
}
buf.WriteString(t.At(i).Type().String())
}
buf.WriteByte(')')
}
// signString is similar to Signature.String(), but it ignores
// param/result names.
func signString(sign *types.Signature) string {
var buf bytes.Buffer
tupleJoin(&buf, sign.Params())
tupleJoin(&buf, sign.Results())
return buf.String()
}
func interesting(t types.Type) bool {
switch x := t.(type) {
case *types.Interface:
return x.NumMethods() > 1
case *types.Named:
if u := x.Underlying(); types.IsInterface(u) {
return interesting(u)
}
return x.NumMethods() >= 1
case *types.Pointer:
return interesting(x.Elem())
default:
return false
}
}
// anyInteresting reports whether at least one parameter in params has
// a type worth analyzing (see interesting).
func anyInteresting(params *types.Tuple) bool {
	for i, n := 0, params.Len(); i < n; i++ {
		if interesting(params.At(i).Type()) {
			return true
		}
	}
	return false
}
// fromScope walks a package scope and collects two things:
//   - ifaces: canonical method-set string -> interface name, for each
//     interface declared in the scope whose methods are all exported;
//   - funcs: the signature strings of declared func types and of
//     interface methods, used later to skip funcs that may be
//     implementing one of them.
// Only signatures with at least one "interesting" parameter are kept.
func fromScope(scope *types.Scope) (ifaces map[string]string, funcs map[string]bool) {
	ifaces = make(map[string]string)
	funcs = make(map[string]bool)
	for _, name := range scope.Names() {
		tn, ok := scope.Lookup(name).(*types.TypeName)
		if !ok {
			continue
		}
		switch x := tn.Type().Underlying().(type) {
		case *types.Interface:
			// nil/empty map means an unexported method was found,
			// or there were no methods at all - not suggestible.
			iface := methoderFuncMap(x, false)
			if len(iface) == 0 {
				continue
			}
			for i := 0; i < x.NumMethods(); i++ {
				f := x.Method(i)
				sign := f.Type().(*types.Signature)
				if !anyInteresting(sign.Params()) {
					continue
				}
				funcs[signString(sign)] = true
			}
			s := funcMapString(iface)
			// keep the first name seen for a given method set
			if _, e := ifaces[s]; !e {
				ifaces[s] = tn.Name()
			}
		case *types.Signature:
			if !anyInteresting(x.Params()) {
				continue
			}
			funcs[signString(x)] = true
		}
	}
	return ifaces, funcs
}
// mentionsName reports whether the function name fname references
// name, either as a capitalized word anywhere ("...File...") or as a
// lower-case prefix ("file..."). Names shorter than two characters
// are too ambiguous and never match.
func mentionsName(fname, name string) bool {
	if len(name) < 2 {
		return false
	}
	if strings.HasPrefix(fname, strings.ToLower(name)) {
		return true
	}
	capitalized := strings.ToUpper(name[:1]) + name[1:]
	return strings.Contains(fname, capitalized)
}
func typeNamed(t types.Type) *types.Named {
for {
switch x := t.(type) {
case *types.Named:
return x
case *types.Pointer:
t = x.Elem()
default:
return nil
}
}
}

View File

@@ -0,0 +1,26 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package main // import "mvdan.cc/interfacer"
import (
"flag"
"fmt"
"os"
"mvdan.cc/interfacer/check"
)
// The -v flag is accepted for command-line compatibility but its
// value is not read anywhere in this file.
var _ = flag.Bool("v", false, "print the names of packages as they are checked")

// main runs the interfacer check over the packages named on the
// command line, printing one warning per line; it exits non-zero on
// load/check failure.
func main() {
	flag.Parse()
	lines, err := check.CheckArgs(flag.Args())
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, line := range lines {
		fmt.Println(line)
	}
}

27
tools/vendor/mvdan.cc/lint/LICENSE vendored Normal file
View File

@@ -0,0 +1,27 @@
Copyright (c) 2017, Daniel Martí. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

27
tools/vendor/mvdan.cc/lint/README.md vendored Normal file
View File

@@ -0,0 +1,27 @@
# lint
[![GoDoc](https://godoc.org/mvdan.cc/lint?status.svg)](https://godoc.org/mvdan.cc/lint)
[![Build Status](https://travis-ci.org/mvdan/lint.svg?branch=master)](https://travis-ci.org/mvdan/lint)
Work in progress. Its API might change before the 1.0 release.
This package intends to define simple interfaces that Go code checkers
can implement. This would simplify calling them from Go code, as well as
running multiple linters while sharing initial loading work.
### metalint
go get -u mvdan.cc/lint/cmd/metalint
The start of a linter that runs many linters leveraging the common
interface. Not stable yet.
Linters included:
* [unparam](https://mvdan.cc/unparam)
* [interfacer](https://github.com/mvdan/interfacer)
### Related projects
* [golinters](https://github.com/thomasheller/golinters) - Report on
linter support

28
tools/vendor/mvdan.cc/lint/lint.go vendored Normal file
View File

@@ -0,0 +1,28 @@
// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
// Package lint defines common interfaces for Go code checkers.
package lint // import "mvdan.cc/lint"
import (
"go/token"
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/ssa"
)
// A Checker points out issues in a program.
type Checker interface {
	// Program supplies the loaded program before Check is called.
	Program(*loader.Program)
	// Check runs the analysis and returns any issues found.
	Check() ([]Issue, error)
}

// WithSSA is implemented by checkers that additionally need the SSA
// form of the program before Check is called.
type WithSSA interface {
	ProgramSSA(*ssa.Program)
}

// Issue represents an issue somewhere in a source code file.
type Issue interface {
	Pos() token.Pos
	Message() string
}

27
tools/vendor/mvdan.cc/unparam/LICENSE vendored Normal file
View File

@@ -0,0 +1,27 @@
Copyright (c) 2017, Daniel Martí. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

24
tools/vendor/mvdan.cc/unparam/README.md vendored Normal file
View File

@@ -0,0 +1,24 @@
# unparam
[![Build Status](https://travis-ci.org/mvdan/unparam.svg?branch=master)](https://travis-ci.org/mvdan/unparam)
go get -u mvdan.cc/unparam
Reports unused function parameters and results in your code.
To minimise false positives, it ignores certain cases such as:
* Exported functions (by default, see `-exported`)
* Unnamed and underscore parameters
* Funcs that may satisfy an interface
* Funcs that may satisfy a function signature
* Funcs that are stubs (empty, only error, immediately return, etc)
* Funcs that have multiple implementations via build tags
It also reports results that always return the same value, parameters
that always receive the same value, and results that are never used. In
the last two cases, a minimum number of calls is required to ensure that
the warnings are useful.
False positives can still occur by design. The aim of the tool is to be
as precise as possible - if you find any mistakes, file a bug.

View File

@@ -0,0 +1,603 @@
// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
// Package check implements the unparam linter. Note that its API is not
// stable.
package check // import "mvdan.cc/unparam/check"
import (
"bytes"
"fmt"
"go/ast"
"go/constant"
"go/parser"
"go/printer"
"go/token"
"go/types"
"io"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/callgraph/cha"
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
"github.com/kisielk/gotool"
"mvdan.cc/lint"
)
func UnusedParams(tests, exported, debug bool, args ...string) ([]string, error) {
wd, err := os.Getwd()
if err != nil {
return nil, err
}
c := &Checker{
wd: wd,
tests: tests,
exported: exported,
}
if debug {
c.debugLog = os.Stderr
}
return c.lines(args...)
}
type Checker struct {
lprog *loader.Program
prog *ssa.Program
wd string
tests bool
exported bool
debugLog io.Writer
cachedDeclCounts map[string]map[string]int
callByPos map[token.Pos]*ast.CallExpr
}
var (
_ lint.Checker = (*Checker)(nil)
_ lint.WithSSA = (*Checker)(nil)
errorType = types.Universe.Lookup("error").Type()
)
func (c *Checker) lines(args ...string) ([]string, error) {
paths := gotool.ImportPaths(args)
conf := loader.Config{
ParserMode: parser.ParseComments,
}
if _, err := conf.FromArgs(paths, c.tests); err != nil {
return nil, err
}
lprog, err := conf.Load()
if err != nil {
return nil, err
}
prog := ssautil.CreateProgram(lprog, 0)
prog.Build()
c.Program(lprog)
c.ProgramSSA(prog)
issues, err := c.Check()
if err != nil {
return nil, err
}
lines := make([]string, len(issues))
for i, issue := range issues {
fpos := prog.Fset.Position(issue.Pos()).String()
if strings.HasPrefix(fpos, c.wd) {
fpos = fpos[len(c.wd)+1:]
}
lines[i] = fmt.Sprintf("%s: %s", fpos, issue.Message())
}
return lines, nil
}
// Issue is a single unparam warning; it implements lint.Issue.
type Issue struct {
	pos   token.Pos // position of the offending parameter or result
	fname string    // qualified name of the function it belongs to
	msg   string    // warning text
}

// Pos returns the position the warning refers to.
func (i Issue) Pos() token.Pos { return i.pos }

// Message returns the warning prefixed with the function's name.
func (i Issue) Message() string { return i.fname + " - " + i.msg }
func (c *Checker) Program(lprog *loader.Program) {
c.lprog = lprog
}
func (c *Checker) ProgramSSA(prog *ssa.Program) {
c.prog = prog
}
// debug writes a formatted message to the checker's debug log, doing
// nothing when no log writer is configured.
func (c *Checker) debug(format string, a ...interface{}) {
	if c.debugLog == nil {
		return
	}
	fmt.Fprintf(c.debugLog, format, a...)
}
// generatedDoc reports whether a file's leading comment text marks it
// as machine-generated, using the conventional markers.
func generatedDoc(text string) bool {
	for _, marker := range []string{"Code generated", "DO NOT EDIT"} {
		if strings.Contains(text, marker) {
			return true
		}
	}
	return false
}
var stdSizes = types.SizesFor("gc", "amd64")
func (c *Checker) Check() ([]lint.Issue, error) {
c.cachedDeclCounts = make(map[string]map[string]int)
c.callByPos = make(map[token.Pos]*ast.CallExpr)
wantPkg := make(map[*types.Package]*loader.PackageInfo)
genFiles := make(map[string]bool)
for _, info := range c.lprog.InitialPackages() {
wantPkg[info.Pkg] = info
for _, f := range info.Files {
if len(f.Comments) > 0 && generatedDoc(f.Comments[0].Text()) {
fname := c.prog.Fset.Position(f.Pos()).Filename
genFiles[fname] = true
}
ast.Inspect(f, func(node ast.Node) bool {
if ce, ok := node.(*ast.CallExpr); ok {
c.callByPos[ce.Lparen] = ce
}
return true
})
}
}
cg := cha.CallGraph(c.prog)
var issues []lint.Issue
funcLoop:
for fn := range ssautil.AllFunctions(c.prog) {
if fn.Pkg == nil { // builtin?
continue
}
if len(fn.Blocks) == 0 { // stub
continue
}
info := wantPkg[fn.Pkg.Pkg]
if info == nil { // not part of given pkgs
continue
}
if c.exported || fn.Pkg.Pkg.Name() == "main" {
// we want exported funcs, or this is a main
// package so nothing is exported
} else if strings.Contains(fn.Name(), "$") {
// anonymous function
} else if ast.IsExported(fn.Name()) {
continue // user doesn't want to change signatures here
}
fname := c.prog.Fset.Position(fn.Pos()).Filename
if genFiles[fname] {
continue // generated file
}
c.debug("func %s\n", fn.RelString(fn.Package().Pkg))
if dummyImpl(fn.Blocks[0]) { // panic implementation
c.debug(" skip - dummy implementation\n")
continue
}
for _, edge := range cg.Nodes[fn].In {
call := edge.Site.Value()
if receivesExtractedArgs(fn.Signature, call) {
// called via function(results())
c.debug(" skip - type is required via call\n")
continue funcLoop
}
caller := edge.Caller.Func
switch {
case len(caller.FreeVars) == 1 && strings.HasSuffix(caller.Name(), "$bound"):
// passing method via someFunc(type.method)
fallthrough
case len(caller.FreeVars) == 0 && strings.HasSuffix(caller.Name(), "$thunk"):
// passing method via someFunc(recv.method)
c.debug(" skip - type is required via call\n")
continue funcLoop
}
switch edge.Site.Common().Value.(type) {
case *ssa.Function:
default:
// called via a parameter or field, type
// is set in stone.
c.debug(" skip - type is required via call\n")
continue funcLoop
}
}
if c.multipleImpls(info, fn) {
c.debug(" skip - multiple implementations via build tags\n")
continue
}
results := fn.Signature.Results()
seenConsts := make([]constant.Value, results.Len())
seenParams := make([]*ssa.Parameter, results.Len())
numRets := 0
allRetsExtracting := true
for _, block := range fn.Blocks {
last := block.Instrs[len(block.Instrs)-1]
ret, ok := last.(*ssa.Return)
if !ok {
continue
}
for i, val := range ret.Results {
switch x := val.(type) {
case *ssa.Const:
allRetsExtracting = false
seenParams[i] = nil
switch {
case numRets == 0:
seenConsts[i] = x.Value
case seenConsts[i] == nil:
case !constant.Compare(seenConsts[i], token.EQL, x.Value):
seenConsts[i] = nil
}
case *ssa.Parameter:
allRetsExtracting = false
seenConsts[i] = nil
switch {
case numRets == 0:
seenParams[i] = x
case seenParams[i] == nil:
case seenParams[i] != x:
seenParams[i] = nil
}
case *ssa.Extract:
seenConsts[i] = nil
seenParams[i] = nil
default:
allRetsExtracting = false
seenConsts[i] = nil
seenParams[i] = nil
}
}
numRets++
}
for i, val := range seenConsts {
if val == nil || numRets < 2 {
continue
}
res := results.At(i)
name := paramDesc(i, res)
issues = append(issues, Issue{
pos: res.Pos(),
fname: fn.RelString(fn.Package().Pkg),
msg: fmt.Sprintf("result %s is always %s", name, val.String()),
})
}
callers := cg.Nodes[fn].In
resLoop:
for i := 0; i < results.Len(); i++ {
if allRetsExtracting {
continue
}
res := results.At(i)
if res.Type() == errorType {
// "error is never unused" is less
// useful, and it's up to tools like
// errcheck anyway.
continue
}
count := 0
for _, edge := range callers {
val := edge.Site.Value()
if val == nil { // e.g. go statement
count++
continue
}
for _, instr := range *val.Referrers() {
extract, ok := instr.(*ssa.Extract)
if !ok {
continue resLoop // direct, real use
}
if extract.Index != i {
continue // not the same result param
}
if len(*extract.Referrers()) > 0 {
continue resLoop // real use after extraction
}
}
count++
}
if count < 2 {
continue // require ignoring at least twice
}
name := paramDesc(i, res)
issues = append(issues, Issue{
pos: res.Pos(),
fname: fn.RelString(fn.Package().Pkg),
msg: fmt.Sprintf("result %s is never used", name),
})
}
for i, par := range fn.Params {
if i == 0 && fn.Signature.Recv() != nil { // receiver
continue
}
c.debug("%s\n", par.String())
switch par.Object().Name() {
case "", "_": // unnamed
c.debug(" skip - unnamed\n")
continue
}
if stdSizes.Sizeof(par.Type()) == 0 {
c.debug(" skip - zero size\n")
continue
}
reason := "is unused"
if valStr := c.receivesSameValues(cg.Nodes[fn].In, par, i); valStr != "" {
reason = fmt.Sprintf("always receives %s", valStr)
} else if anyRealUse(par, i) {
c.debug(" skip - used somewhere in the func body\n")
continue
}
issues = append(issues, Issue{
pos: par.Pos(),
fname: fn.RelString(fn.Package().Pkg),
msg: fmt.Sprintf("%s %s", par.Name(), reason),
})
}
}
sort.Slice(issues, func(i, j int) bool {
p1 := c.prog.Fset.Position(issues[i].Pos())
p2 := c.prog.Fset.Position(issues[j].Pos())
if p1.Filename == p2.Filename {
return p1.Offset < p2.Offset
}
return p1.Filename < p2.Filename
})
return issues, nil
}
func nodeStr(node ast.Node) string {
var buf bytes.Buffer
fset := token.NewFileSet()
if err := printer.Fprint(&buf, fset, node); err != nil {
panic(err)
}
return buf.String()
}
func (c *Checker) receivesSameValues(in []*callgraph.Edge, par *ssa.Parameter, pos int) string {
if ast.IsExported(par.Parent().Name()) {
// we might not have all call sites for an exported func
return ""
}
var seen constant.Value
origPos := pos
if par.Parent().Signature.Recv() != nil {
// go/ast's CallExpr.Args does not include the receiver,
// but go/ssa's equivalent does.
origPos--
}
seenOrig := ""
count := 0
for _, edge := range in {
call := edge.Site.Common()
cnst, ok := call.Args[pos].(*ssa.Const)
if !ok {
return "" // not a constant
}
origArg := ""
origCall := c.callByPos[call.Pos()]
if origPos >= len(origCall.Args) {
// variadic parameter that wasn't given
} else {
origArg = nodeStr(origCall.Args[origPos])
}
if seen == nil {
seen = cnst.Value // first constant
seenOrig = origArg
count = 1
} else if !constant.Compare(seen, token.EQL, cnst.Value) {
return "" // different constants
} else {
count++
if origArg != seenOrig {
seenOrig = ""
}
}
}
if count < 4 {
return "" // not enough times, likely false positive
}
if seenOrig != "" && seenOrig != seen.String() {
return fmt.Sprintf("%s (%v)", seenOrig, seen)
}
return seen.String()
}
func anyRealUse(par *ssa.Parameter, pos int) bool {
refLoop:
for _, ref := range *par.Referrers() {
switch x := ref.(type) {
case *ssa.Call:
if x.Call.Value != par.Parent() {
return true // not a recursive call
}
for i, arg := range x.Call.Args {
if arg != par {
continue
}
if i == pos {
// reused directly in a recursive call
continue refLoop
}
}
return true
case *ssa.Store:
if insertedStore(x) {
continue // inserted by go/ssa, not from the code
}
return true
default:
return true
}
}
return false
}
// insertedStore reports whether instr is a store synthesized by go/ssa
// rather than written in the source: it carries no source position and
// its destination alloc is referenced only by this store itself.
func insertedStore(instr ssa.Instruction) bool {
	if instr.Pos() != token.NoPos {
		return false
	}
	store, isStore := instr.(*ssa.Store)
	if !isStore {
		return false
	}
	alloc, isAlloc := store.Addr.(*ssa.Alloc)
	if !isAlloc {
		return false
	}
	// we want exactly one use of this alloc value for it to be
	// inserted by ssa and dummy - the alloc instruction itself.
	return len(*alloc.Referrers()) == 1
}
var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`)
// dummyImpl reports whether a block is a dummy implementation. This is
// true if the block will almost immediately panic, throw or return
// constants only.
func dummyImpl(blk *ssa.BasicBlock) bool {
var ops [8]*ssa.Value
for _, instr := range blk.Instrs {
if insertedStore(instr) {
continue // inserted by go/ssa, not from the code
}
for _, val := range instr.Operands(ops[:0]) {
switch x := (*val).(type) {
case nil, *ssa.Const, *ssa.ChangeType, *ssa.Alloc,
*ssa.MakeInterface, *ssa.Function,
*ssa.Global, *ssa.IndexAddr, *ssa.Slice,
*ssa.UnOp, *ssa.Parameter:
case *ssa.Call:
if rxHarmlessCall.MatchString(x.Call.Value.String()) {
continue
}
default:
return false
}
}
switch x := instr.(type) {
case *ssa.Alloc, *ssa.Store, *ssa.UnOp, *ssa.BinOp,
*ssa.MakeInterface, *ssa.MakeMap, *ssa.Extract,
*ssa.IndexAddr, *ssa.FieldAddr, *ssa.Slice,
*ssa.Lookup, *ssa.ChangeType, *ssa.TypeAssert,
*ssa.Convert, *ssa.ChangeInterface:
// non-trivial expressions in panic/log/print
// calls
case *ssa.Return, *ssa.Panic:
return true
case *ssa.Call:
if rxHarmlessCall.MatchString(x.Call.Value.String()) {
continue
}
return x.Call.Value.Name() == "throw" // runtime's panic
default:
return false
}
}
return false
}
func (c *Checker) declCounts(pkgDir string, pkgName string) map[string]int {
if m := c.cachedDeclCounts[pkgDir]; m != nil {
return m
}
fset := token.NewFileSet()
pkgs, err := parser.ParseDir(fset, pkgDir, nil, 0)
if err != nil {
println(err.Error())
c.cachedDeclCounts[pkgDir] = map[string]int{}
return map[string]int{}
}
pkg := pkgs[pkgName]
count := make(map[string]int)
for _, file := range pkg.Files {
for _, decl := range file.Decls {
fd, _ := decl.(*ast.FuncDecl)
if fd == nil {
continue
}
name := astPrefix(fd.Recv) + fd.Name.Name
count[name]++
}
}
c.cachedDeclCounts[pkgDir] = count
return count
}
func astPrefix(recv *ast.FieldList) string {
if recv == nil {
return ""
}
expr := recv.List[0].Type
for {
star, _ := expr.(*ast.StarExpr)
if star == nil {
break
}
expr = star.X
}
id := expr.(*ast.Ident)
return id.Name + "."
}
func (c *Checker) multipleImpls(info *loader.PackageInfo, fn *ssa.Function) bool {
if fn.Parent() != nil { // nested func
return false
}
path := c.prog.Fset.Position(fn.Pos()).Filename
if path == "" { // generated func, like init
return false
}
count := c.declCounts(filepath.Dir(path), info.Pkg.Name())
name := fn.Name()
if fn.Signature.Recv() != nil {
tp := fn.Params[0].Type()
for {
point, _ := tp.(*types.Pointer)
if point == nil {
break
}
tp = point.Elem()
}
named := tp.(*types.Named)
name = named.Obj().Name() + "." + name
}
return count[name] > 1
}
func receivesExtractedArgs(sign *types.Signature, call *ssa.Call) bool {
if call == nil {
return false
}
if sign.Params().Len() < 2 {
return false // extracting into one param is ok
}
args := call.Operands(nil)
for i, arg := range args {
if i == 0 {
continue // *ssa.Function, func itself
}
if i == 1 && sign.Recv() != nil {
continue // method receiver
}
if _, ok := (*arg).(*ssa.Extract); !ok {
return false
}
}
return true
}
func paramDesc(i int, v *types.Var) string {
name := v.Name()
if name != "" {
return name
}
return fmt.Sprintf("%d (%s)", i, v.Type().String())
}

34
tools/vendor/mvdan.cc/unparam/main.go vendored Normal file
View File

@@ -0,0 +1,34 @@
// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package main // import "mvdan.cc/unparam"
import (
"flag"
"fmt"
"os"
"mvdan.cc/unparam/check"
)
// Command-line flags controlling what unparam inspects.
var (
tests = flag.Bool("tests", true, "include tests")
exported = flag.Bool("exported", false, "inspect exported functions")
debug = flag.Bool("debug", false, "debug prints")
)
// main parses flags, runs the unused-parameter check over the named
// packages, prints one line per warning, and exits non-zero on error.
func main() {
	flag.Usage = printUsage
	flag.Parse()
	if err := run(flag.Args()); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

// printUsage writes the command synopsis and flag defaults to stderr.
func printUsage() {
	fmt.Fprintln(os.Stderr, "usage: unparam [flags] [package ...]")
	flag.PrintDefaults()
}

// run performs the check on args and prints each warning to stdout.
func run(args []string) error {
	warns, err := check.UnusedParams(*tests, *exported, *debug, args...)
	if err != nil {
		return err
	}
	for _, warn := range warns {
		fmt.Println(warn)
	}
	return nil
}

View File

@@ -9,10 +9,10 @@
"revisionTime": "2017-02-19T07:16:37Z" "revisionTime": "2017-02-19T07:16:37Z"
}, },
{ {
"checksumSHA1": "HORezzSZjiTLlEG6RV5e3VoBMTI=", "checksumSHA1": "IL9TI69eihi/XUfY/k/uuBRI8WY=",
"path": "github.com/GoASTScanner/gas", "path": "github.com/GoASTScanner/gas",
"revision": "1beec25f7754273c9672a3368ea7048d4e73138e", "revision": "6de76c92610b387855cdfdd53c99b149928916f7",
"revisionTime": "2017-04-11T19:38:53Z" "revisionTime": "2017-10-04T14:01:47Z"
}, },
{ {
"checksumSHA1": "sK1dOo48F424xLCvE+ic8tRk7i8=", "checksumSHA1": "sK1dOo48F424xLCvE+ic8tRk7i8=",
@@ -39,10 +39,10 @@
"revisionTime": "2015-02-08T22:17:26Z" "revisionTime": "2015-02-08T22:17:26Z"
}, },
{ {
"checksumSHA1": "dGfv3nwAfhSJ+20gR0yYgUwjPVY=", "checksumSHA1": "HL3Dyr4dmbtBo+V3ULfRJMWAyoA=",
"path": "github.com/alecthomas/gometalinter", "path": "github.com/alecthomas/gometalinter",
"revision": "258ea75208e542b8a18a08e053ec10379a9f50f9", "revision": "212b1b91e362ea0b0e441c9b53ce31e81405c240",
"revisionTime": "2017-08-02T01:03:20Z" "revisionTime": "2017-11-26T10:02:12Z"
}, },
{ {
"checksumSHA1": "fCc3grA7vIxfBru7R3SqjcW+oLI=", "checksumSHA1": "fCc3grA7vIxfBru7R3SqjcW+oLI=",
@@ -62,6 +62,12 @@
"revision": "e80c3b7ed292b052c7083b6fd7154a8422c33f65", "revision": "e80c3b7ed292b052c7083b6fd7154a8422c33f65",
"revisionTime": "2017-02-16T02:04:25Z" "revisionTime": "2017-02-16T02:04:25Z"
}, },
{
"checksumSHA1": "rDM1YOCSZE4BLxZoBJV56/VmZSo=",
"path": "github.com/alexkohler/nakedret",
"revision": "c0e305a4f690fed163d47628bcc06a6d5655bf92",
"revisionTime": "2017-11-06T22:32:15Z"
},
{ {
"checksumSHA1": "z6mKUmWeXRT0k+xrXxA5CLKOWiE=", "checksumSHA1": "z6mKUmWeXRT0k+xrXxA5CLKOWiE=",
"path": "github.com/client9/misspell", "path": "github.com/client9/misspell",
@@ -69,10 +75,10 @@
"revisionTime": "2017-05-30T22:15:07Z" "revisionTime": "2017-05-30T22:15:07Z"
}, },
{ {
"checksumSHA1": "YdtKuQqs3pDJhMxOruVxLAD/JSo=", "checksumSHA1": "3Lbx+qNi8brwKa9dU41O6SEOW6c=",
"path": "github.com/client9/misspell/cmd/misspell", "path": "github.com/client9/misspell/cmd/misspell",
"revision": "e1f24e3e0b6b2c8bc98584370d382ae095b13c94", "revision": "9ce5d979ffdaca6385988d7ad1079a33ec942d20",
"revisionTime": "2017-06-21T02:16:58Z" "revisionTime": "2017-09-28T00:02:06Z"
}, },
{ {
"checksumSHA1": "ULnk7ggN82JFO0ZdBCmSsQH3Vh8=", "checksumSHA1": "ULnk7ggN82JFO0ZdBCmSsQH3Vh8=",
@@ -80,6 +86,30 @@
"revision": "855e8d98f1852d48dde521e0522408d1fe7e836a", "revision": "855e8d98f1852d48dde521e0522408d1fe7e836a",
"revisionTime": "2015-10-22T10:31:44Z" "revisionTime": "2015-10-22T10:31:44Z"
}, },
{
"checksumSHA1": "aLXmB5i+PFfM/dVqfHxoB3a8v/g=",
"path": "github.com/dnephin/govet",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"revisionTime": "2017-10-12T18:51:37Z",
"version": "fork",
"versionExact": "fork"
},
{
"checksumSHA1": "Ba6cj2wCpDZcjE0kZ4Q32PJW4fg=",
"path": "github.com/dnephin/govet/internal/cfg",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"revisionTime": "2017-10-12T18:51:37Z",
"version": "fork",
"versionExact": "fork"
},
{
"checksumSHA1": "ttQiZmni3k7Tbfi4/CX3JG6NjAw=",
"path": "github.com/dnephin/govet/internal/whitelist",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"revisionTime": "2017-10-12T18:51:37Z",
"version": "fork",
"versionExact": "fork"
},
{ {
"checksumSHA1": "+U50xksZS8g53vruOP7px7cLilE=", "checksumSHA1": "+U50xksZS8g53vruOP7px7cLilE=",
"path": "github.com/golang/lint", "path": "github.com/golang/lint",
@@ -89,8 +119,8 @@
{ {
"checksumSHA1": "REm9ddVDL+Pm1OPxjVMxxpIjyw4=", "checksumSHA1": "REm9ddVDL+Pm1OPxjVMxxpIjyw4=",
"path": "github.com/golang/lint/golint", "path": "github.com/golang/lint/golint",
"revision": "c5fb716d6688a859aae56d26d3e6070808df29f7", "revision": "6aaf7c34af0f4c36a57e0c429bace4d706d8e931",
"revisionTime": "2017-06-02T23:41:31Z" "revisionTime": "2017-10-05T22:33:36Z"
}, },
{ {
"checksumSHA1": "e/Kc2UOy1lKAy31xWlK37M1r2e8=", "checksumSHA1": "e/Kc2UOy1lKAy31xWlK37M1r2e8=",
@@ -99,10 +129,10 @@
"revisionTime": "2015-01-27T13:39:51Z" "revisionTime": "2015-01-27T13:39:51Z"
}, },
{ {
"checksumSHA1": "F4OFLBCfPosjmNN+mpLtdbmlg2g=", "checksumSHA1": "TKaX+8YCZQwYv7Kiy2o/E86V0aE=",
"path": "github.com/gordonklaus/ineffassign", "path": "github.com/gordonklaus/ineffassign",
"revision": "08cd83b3f513abe575f40d23acf2ae79088128bb", "revision": "7bae11eba15a3285c75e388f77eb6357a2d73ee2",
"revisionTime": "2017-06-26T19:28:42Z" "revisionTime": "2017-11-18T19:06:32Z"
}, },
{ {
"checksumSHA1": "DbSCKltce7IrgpDUF8+C7J+z+GU=", "checksumSHA1": "DbSCKltce7IrgpDUF8+C7J+z+GU=",
@@ -119,8 +149,8 @@
{ {
"checksumSHA1": "NKvKUGq0lp/GjLS7Ffp7BAjcoTg=", "checksumSHA1": "NKvKUGq0lp/GjLS7Ffp7BAjcoTg=",
"path": "github.com/kardianos/govendor", "path": "github.com/kardianos/govendor",
"revision": "b6d23590f46ec4816cb726e013bae44954d58972", "revision": "d644d03c4cc00cf7c88a365de8508c12ce56c392",
"revisionTime": "2017-07-28T15:58:28Z" "revisionTime": "2017-11-27T22:34:05Z"
}, },
{ {
"checksumSHA1": "m24kWw3bFoAkKVvTjmxSLsywdHY=", "checksumSHA1": "m24kWw3bFoAkKVvTjmxSLsywdHY=",
@@ -195,10 +225,10 @@
"revisionTime": "2017-05-06T05:20:04Z" "revisionTime": "2017-05-06T05:20:04Z"
}, },
{ {
"checksumSHA1": "eva6MIhWu7k6BWB8G7OOYTJnpeA=", "checksumSHA1": "GkLbM8KFqtVcp4kzc40+VinQukY=",
"path": "github.com/kisielk/errcheck", "path": "github.com/kisielk/errcheck",
"revision": "23699b7e2cbfdb89481023524954ba2aeff6be90", "revision": "b1445a9dd8285a50c6d1661d16f0a9ceb08125f7",
"revisionTime": "2017-03-17T17:34:29Z" "revisionTime": "2017-09-18T09:31:01Z"
}, },
{ {
"checksumSHA1": "GP25rgIPshJh0tpiBg3Z8Dexqj4=", "checksumSHA1": "GP25rgIPshJh0tpiBg3Z8Dexqj4=",
@@ -213,10 +243,16 @@
"revisionTime": "2016-11-30T08:01:11Z" "revisionTime": "2016-11-30T08:01:11Z"
}, },
{ {
"checksumSHA1": "18oHyXPLzfVDaXtMYIUyK23+slg=", "checksumSHA1": "+ArBDwAK19OC1iDy1yEbXOC9sgQ=",
"path": "github.com/mattn/goveralls", "path": "github.com/mattn/goveralls",
"revision": "6efce81852ad1b7567c17ad71b03aeccc9dd9ae0", "revision": "b71a1e4855f87991aff01c2c833a75a07059c61c",
"revisionTime": "2017-07-18T16:42:45Z" "revisionTime": "2017-11-14T04:29:57Z"
},
{
"checksumSHA1": "dk0ehYSmMaGLWFQPND3cVgk744I=",
"path": "github.com/mdempsky/maligned",
"revision": "08c8e9db1bce03f1af283686c0943fcb75f0109e",
"revisionTime": "2016-08-25T09:47:39Z"
}, },
{ {
"checksumSHA1": "90pFJb64MwgYvN5AmPVaWl87ZyU=", "checksumSHA1": "90pFJb64MwgYvN5AmPVaWl87ZyU=",
@@ -225,10 +261,10 @@
"revisionTime": "2016-08-03T23:01:54Z" "revisionTime": "2016-08-03T23:01:54Z"
}, },
{ {
"checksumSHA1": "aJa6VmPp5WZZ2Z705/TdhqQG/k4=", "checksumSHA1": "k3eGAQ+pCIffVpgvoBrPuLK6Yz8=",
"path": "github.com/mibk/dupl", "path": "github.com/mibk/dupl",
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6", "revision": "72dc2d83bec70e053e9294378aacb1a032f51a31",
"revisionTime": "2017-02-27T22:14:17Z" "revisionTime": "2017-11-19T16:48:37Z"
}, },
{ {
"checksumSHA1": "sHi3Qhc2/0XMcIXB31NLXkbIoz0=", "checksumSHA1": "sHi3Qhc2/0XMcIXB31NLXkbIoz0=",
@@ -242,6 +278,12 @@
"revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6", "revision": "3447d9b0cb5a3e7dccb1a9f1c975f35683f304e6",
"revisionTime": "2017-02-27T22:14:17Z" "revisionTime": "2017-02-27T22:14:17Z"
}, },
{
"checksumSHA1": "mGQ3tVbY9uLwfwoeQjvBBBm7yRw=",
"path": "github.com/mibk/dupl/printer",
"revision": "72dc2d83bec70e053e9294378aacb1a032f51a31",
"revisionTime": "2017-11-19T16:48:37Z"
},
{ {
"checksumSHA1": "o9BJwhna5BuCTYWBGBo9VA+Ez/M=", "checksumSHA1": "o9BJwhna5BuCTYWBGBo9VA+Ez/M=",
"path": "github.com/mibk/dupl/suffixtree", "path": "github.com/mibk/dupl/suffixtree",
@@ -269,8 +311,8 @@
{ {
"checksumSHA1": "zD/VW+BRbOjxk1xq5bmdigi0cp8=", "checksumSHA1": "zD/VW+BRbOjxk1xq5bmdigi0cp8=",
"path": "github.com/mvdan/interfacer/cmd/interfacer", "path": "github.com/mvdan/interfacer/cmd/interfacer",
"revision": "0b82697b33395e082e4010e4122abb213eadfb5e", "revision": "22c51662ff476dfd97944f74db1b263ed920ee83",
"revisionTime": "2017-08-01T09:14:01Z" "revisionTime": "2017-04-06T16:05:15Z"
}, },
{ {
"checksumSHA1": "18GDIJCo0vo+mmQDIYmyb2JSWqo=", "checksumSHA1": "18GDIJCo0vo+mmQDIYmyb2JSWqo=",
@@ -279,16 +321,16 @@
"revisionTime": "2017-04-06T10:09:31Z" "revisionTime": "2017-04-06T10:09:31Z"
}, },
{ {
"checksumSHA1": "nV5rJzg1w+8RG7lgDtuMXvtgJ6g=", "checksumSHA1": "5LiZtu67exUdRJ0/QQvU/epG9no=",
"path": "github.com/mvdan/unparam", "path": "github.com/mvdan/unparam",
"revision": "4f8ea7ae6525529da4e3c90bda033935b80d709a", "revision": "d647bb803b10a6777ee4c6a176416b91fa14713e",
"revisionTime": "2017-08-02T23:35:07Z" "revisionTime": "2017-05-30T08:59:07Z"
}, },
{ {
"checksumSHA1": "VE/ZFPAtX2obu4EFt1ajO8RydfU=", "checksumSHA1": "tuOLCrGa9DjfXheKkMXtHtQu3bs=",
"path": "github.com/mvdan/unparam/check", "path": "github.com/mvdan/unparam/check",
"revision": "4f8ea7ae6525529da4e3c90bda033935b80d709a", "revision": "d647bb803b10a6777ee4c6a176416b91fa14713e",
"revisionTime": "2017-08-02T23:35:07Z" "revisionTime": "2017-05-30T08:59:07Z"
}, },
{ {
"checksumSHA1": "DP8R0Q7TDlHbhz9Livyj8RkRKvU=", "checksumSHA1": "DP8R0Q7TDlHbhz9Livyj8RkRKvU=",
@@ -461,14 +503,14 @@
{ {
"checksumSHA1": "V4M/6A62nVBzPFxPbN+EAatCrVs=", "checksumSHA1": "V4M/6A62nVBzPFxPbN+EAatCrVs=",
"path": "golang.org/x/tools/cmd/goimports", "path": "golang.org/x/tools/cmd/goimports",
"revision": "4e70a1b26a7875f00ca1916637a876b5ffaeec59", "revision": "36c7af3342056179a831c19ec142f2763b310f7b",
"revisionTime": "2017-08-02T20:37:59Z" "revisionTime": "2017-11-29T19:51:57Z"
}, },
{ {
"checksumSHA1": "V6/A1ZOZ2GUOZcRWcXegtci2FoU=", "checksumSHA1": "V6/A1ZOZ2GUOZcRWcXegtci2FoU=",
"path": "golang.org/x/tools/cmd/gotype", "path": "golang.org/x/tools/cmd/gotype",
"revision": "4e70a1b26a7875f00ca1916637a876b5ffaeec59", "revision": "36c7af3342056179a831c19ec142f2763b310f7b",
"revisionTime": "2017-08-02T20:37:59Z" "revisionTime": "2017-11-29T19:51:57Z"
}, },
{ {
"checksumSHA1": "nD89PLkMqA5CakR8SoDuj3iQz1M=", "checksumSHA1": "nD89PLkMqA5CakR8SoDuj3iQz1M=",
@@ -575,44 +617,50 @@
{ {
"checksumSHA1": "FG8LnaSRTHBnrPHwa0zW4zX9K7M=", "checksumSHA1": "FG8LnaSRTHBnrPHwa0zW4zX9K7M=",
"path": "honnef.co/go/tools/callgraph", "path": "honnef.co/go/tools/callgraph",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "fR7Q7BVwKHUEsUNGn6Q2zygAvTU=", "checksumSHA1": "fR7Q7BVwKHUEsUNGn6Q2zygAvTU=",
"path": "honnef.co/go/tools/callgraph/static", "path": "honnef.co/go/tools/callgraph/static",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "tZUNdNOtkUpEB27J1ob00AFjsW4=", "checksumSHA1": "YL/UCzWYvDXeFInLOLC1buYve6w=",
"path": "honnef.co/go/tools/cmd/gosimple", "path": "honnef.co/go/tools/cmd/gosimple",
"revision": "ae0caf6437a82200bc1c349a4f761b4106409f0b", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-07-31T15:06:36Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "pHczeoSy3ltbM5mEQzrG845aiAk=", "checksumSHA1": "84jyAI0Uv1PQ3fN3Ufi0T7/IpOw=",
"path": "honnef.co/go/tools/cmd/megacheck", "path": "honnef.co/go/tools/cmd/megacheck",
"revision": "ae0caf6437a82200bc1c349a4f761b4106409f0b", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-07-31T15:06:36Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "fCBb/czTIH/xy9Ot7sA95uRDPxE=", "checksumSHA1": "dP4Ft0yiZSTZOzzNho1Gg5b7o2w=",
"path": "honnef.co/go/tools/cmd/staticcheck", "path": "honnef.co/go/tools/cmd/staticcheck",
"revision": "ae0caf6437a82200bc1c349a4f761b4106409f0b", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-07-31T15:06:36Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "TXSOAGnX/YpKr2hBk9EiE3OlODI=", "checksumSHA1": "Qipy1/3Z8n4UnoWF9X0sQ/VC5JI=",
"path": "honnef.co/go/tools/cmd/unused", "path": "honnef.co/go/tools/cmd/unused",
"revision": "ae0caf6437a82200bc1c349a4f761b4106409f0b", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-07-31T15:06:36Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "3oKDy41034t1Hh/cEp2zvhMTU0w=", "checksumSHA1": "smQXvyCgi0lsTRk7edZNx/z44rc=",
"path": "honnef.co/go/tools/deprecated",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "ZQAEQCc18o76M9Cyncm1W5cczJ8=",
"path": "honnef.co/go/tools/functions", "path": "honnef.co/go/tools/functions",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "NPXYxmyCQTv53OmGTCiHvbfIct4=", "checksumSHA1": "NPXYxmyCQTv53OmGTCiHvbfIct4=",
@@ -623,57 +671,97 @@
{ {
"checksumSHA1": "ZWtH73AO33mmXmK2RfGwld1/00I=", "checksumSHA1": "ZWtH73AO33mmXmK2RfGwld1/00I=",
"path": "honnef.co/go/tools/internal/sharedcheck", "path": "honnef.co/go/tools/internal/sharedcheck",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "ALFoG0EDHSENgXXKVakOwdwXUSU=", "checksumSHA1": "44oONKG61hcaBAPaA2jNhBgYLmE=",
"path": "honnef.co/go/tools/lint", "path": "honnef.co/go/tools/lint",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "jevSfIBFuPrD5LPY/jF8EQWUDa8=", "checksumSHA1": "gKJKwlFyfVebwzqA3P/N3HJIq/0=",
"path": "honnef.co/go/tools/lint/lintutil", "path": "honnef.co/go/tools/lint/lintutil",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "Qws66gYHUhxTO8ddXJTdr0Wl9d8=", "checksumSHA1": "kkVszwWx3L3erU3QkMDIppFv34o=",
"path": "honnef.co/go/tools/simple", "path": "honnef.co/go/tools/simple",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "OB5QjdkxC9rYXruXUuoYSsxK+VY=", "checksumSHA1": "OB5QjdkxC9rYXruXUuoYSsxK+VY=",
"path": "honnef.co/go/tools/ssa", "path": "honnef.co/go/tools/ssa",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "jgNTrcXg52qlqjkb/R2vKxtcDu4=", "checksumSHA1": "jgNTrcXg52qlqjkb/R2vKxtcDu4=",
"path": "honnef.co/go/tools/ssa/ssautil", "path": "honnef.co/go/tools/ssa/ssautil",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "/z2xUUyR/MlzlPdU06U2g12P2oU=", "checksumSHA1": "9kqdoLEm2gHS9QVE1OXWBCtRqhI=",
"path": "honnef.co/go/tools/staticcheck", "path": "honnef.co/go/tools/staticcheck",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "9u74dwwwi+tg9eBr86by4i4CMNM=", "checksumSHA1": "9u74dwwwi+tg9eBr86by4i4CMNM=",
"path": "honnef.co/go/tools/staticcheck/vrp", "path": "honnef.co/go/tools/staticcheck/vrp",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
}, },
{ {
"checksumSHA1": "4rxcUL+C0+sbSNRQvkWeCJ4J0Hg=", "checksumSHA1": "tDBL3athXaJ9JoiY75NktH+OTjQ=",
"path": "honnef.co/go/tools/unused", "path": "honnef.co/go/tools/unused",
"revision": "e94d1c1a34c6b61d8d06c7793b8f22cd0dfcdd90", "revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-05-22T19:09:05Z" "revisionTime": "2017-11-25T07:40:24Z"
},
{
"checksumSHA1": "RY0sZkXnDI/MxBauBD28dwuulSs=",
"path": "honnef.co/go/tools/version",
"revision": "376b3b58b9e4def403181ee2fd3d4cc7de8375ae",
"revisionTime": "2017-11-25T07:40:24Z"
},
{
"path": "local/numcpus",
"revision": ""
},
{
"checksumSHA1": "uKQMTzjTI15hy1sTGgsZ/b8PImA=",
"path": "mvdan.cc/interfacer",
"revision": "d7e7372184a059b8fd99d96a593e3811bf989d75",
"revisionTime": "2017-09-08T18:13:45Z"
},
{
"checksumSHA1": "0+bmt/m62xZSbyATqBbp1MTy6ZI=",
"path": "mvdan.cc/interfacer/check",
"revision": "d7e7372184a059b8fd99d96a593e3811bf989d75",
"revisionTime": "2017-09-08T18:13:45Z"
},
{
"checksumSHA1": "pCQUv3qVciM9V98kVNkOw1JWKzs=",
"path": "mvdan.cc/lint",
"revision": "adc824a0674b99099789b6188a058d485eaf61c0",
"revisionTime": "2017-09-08T18:12:59Z"
},
{
"checksumSHA1": "TCj3HvSF+NLYG4SPv7Mv4GQ09xE=",
"path": "mvdan.cc/unparam",
"revision": "644240604b3c6d719b0f981ef28cd1168962efb9",
"revisionTime": "2017-11-25T20:11:03Z"
},
{
"checksumSHA1": "Cdz2FebEWti1vA3eW36lypbYn9w=",
"path": "mvdan.cc/unparam/check",
"revision": "644240604b3c6d719b0f981ef28cd1168962efb9",
"revisionTime": "2017-11-25T20:11:03Z"
} }
], ],
"rootPath": "github.com/wrouesnel/postgres_exporter/tools" "rootPath": "github.com/wrouesnel/self-contained-go-project/tools"
} }