Merge pull request #493 from simonpasquier/update-makefile-common

Fix staticcheck errors and remove a lot of unused code and variables.
Update Makefile.common.
Krasi Georgiev 2019-02-08 11:47:06 +02:00 committed by GitHub
commit 1e91d619de
19 changed files with 48 additions and 177 deletions

@ -29,12 +29,15 @@ GO ?= go
GOFMT ?= $(GO)fmt
FIRST_GOPATH := $(firstword $(subst :, ,$(shell $(GO) env GOPATH)))
GOOPTS ?=
GOHOSTOS ?= $(shell $(GO) env GOHOSTOS)
GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH)
GO_VERSION ?= $(shell $(GO) version)
GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION))
PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.')
unexport GOVENDOR
GOVENDOR :=
GO111MODULE :=
ifeq (, $(PRE_GO_111))
ifneq (,$(wildcard go.mod))
# Enforce Go modules support just in case the directory is inside GOPATH (and for Travis CI).
@ -55,24 +58,35 @@ $(warning Some recipes may not work as expected as the current Go runtime is '$(
# This repository isn't using Go modules (yet).
GOVENDOR := $(FIRST_GOPATH)/bin/govendor
endif
unexport GO111MODULE
endif
PROMU := $(FIRST_GOPATH)/bin/promu
STATICCHECK := $(FIRST_GOPATH)/bin/staticcheck
pkgs = ./...
GO_VERSION ?= $(shell $(GO) version)
GO_BUILD_PLATFORM ?= $(subst /,-,$(lastword $(GO_VERSION)))
ifeq (arm, $(GOHOSTARCH))
GOHOSTARM ?= $(shell GOARM= $(GO) env GOARM)
GO_BUILD_PLATFORM ?= $(GOHOSTOS)-$(GOHOSTARCH)v$(GOHOSTARM)
else
GO_BUILD_PLATFORM ?= $(GOHOSTOS)-$(GOHOSTARCH)
endif
PROMU_VERSION ?= 0.2.0
PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_VERSION)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM).tar.gz
STATICCHECK_VERSION ?= 2019.1
STATICCHECK_URL := https://github.com/dominikh/go-tools/releases/download/$(STATICCHECK_VERSION)/staticcheck_$(GOHOSTOS)_$(GOHOSTARCH)
PREFIX ?= $(shell pwd)
BIN_DIR ?= $(shell pwd)
DOCKER_IMAGE_TAG ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
DOCKER_REPO ?= prom
ifeq ($(GOHOSTARCH),amd64)
ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux freebsd darwin windows))
# Only supported on amd64
test-flags := -race
endif
endif
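The race detector is enabled here only for amd64 hosts on linux, freebsd, darwin and windows, so -race moves out of the common-test recipe below and into the conditional test-flags variable. As a rough illustration (the TestRacyCounter test is made up for this note, not part of the repository), a test like this passes under a plain go test run but fails once -race is in effect:

package example

import "testing"

// TestRacyCounter increments a shared counter from two goroutines without
// synchronization. A plain "go test" run usually passes; "go test -race"
// reports the conflicting writes and fails the test.
func TestRacyCounter(t *testing.T) {
	counter := 0
	done := make(chan struct{})
	go func() {
		counter++ // write from the spawned goroutine
		close(done)
	}()
	counter++ // unsynchronized write from the test goroutine
	<-done
	if counter == 0 {
		t.Fatal("unexpected counter value")
	}
}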
.PHONY: all
all: precheck style staticcheck unused build test
@ -110,12 +124,12 @@ common-test-short:
.PHONY: common-test
common-test:
@echo ">> running all tests"
GO111MODULE=$(GO111MODULE) $(GO) test -race $(GOOPTS) $(pkgs)
GO111MODULE=$(GO111MODULE) $(GO) test $(test-flags) $(GOOPTS) $(pkgs)
.PHONY: common-format
common-format:
@echo ">> formatting code"
GO111MODULE=$(GO111MODULE) $(GO) fmt $(GOOPTS) $(pkgs)
GO111MODULE=$(GO111MODULE) $(GO) fmt $(pkgs)
.PHONY: common-vet
common-vet:
@ -125,8 +139,12 @@ common-vet:
.PHONY: common-staticcheck
common-staticcheck: $(STATICCHECK)
@echo ">> running staticcheck"
chmod +x $(STATICCHECK)
ifdef GO111MODULE
GO111MODULE=$(GO111MODULE) $(STATICCHECK) -ignore "$(STATICCHECK_IGNORE)" -checks "SA*" $(pkgs)
# 'go list' needs to be executed before staticcheck to prepopulate the modules cache.
# Otherwise staticcheck might fail randomly for some reason not yet explained.
GO111MODULE=$(GO111MODULE) $(GO) list -e -compiled -test=true -export=false -deps=true -find=false -tags= -- ./... > /dev/null
GO111MODULE=$(GO111MODULE) $(STATICCHECK) -ignore "$(STATICCHECK_IGNORE)" $(pkgs)
else
$(STATICCHECK) -ignore "$(STATICCHECK_IGNORE)" $(pkgs)
endif
@ -140,8 +158,9 @@ else
ifdef GO111MODULE
@echo ">> running check for unused/missing packages in go.mod"
GO111MODULE=$(GO111MODULE) $(GO) mod tidy
ifeq (,$(wildcard vendor))
@git diff --exit-code -- go.sum go.mod
ifneq (,$(wildcard vendor))
else
@echo ">> running check for unused packages in vendor/"
GO111MODULE=$(GO111MODULE) $(GO) mod vendor
@git diff --exit-code -- go.sum go.mod vendor/
@ -175,30 +194,20 @@ common-docker-tag-latest:
promu: $(PROMU)
$(PROMU):
curl -s -L $(PROMU_URL) | tar -xvz -C /tmp
mkdir -v -p $(FIRST_GOPATH)/bin
cp -v /tmp/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM)/promu $(PROMU)
$(eval PROMU_TMP := $(shell mktemp -d))
curl -s -L $(PROMU_URL) | tar -xvzf - -C $(PROMU_TMP)
mkdir -p $(FIRST_GOPATH)/bin
cp $(PROMU_TMP)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM)/promu $(FIRST_GOPATH)/bin/promu
rm -r $(PROMU_TMP)
.PHONY: proto
proto:
@echo ">> generating code from proto files"
@./scripts/genproto.sh
.PHONY: $(STATICCHECK)
$(STATICCHECK):
ifdef GO111MODULE
# Get staticcheck from a temporary directory to avoid modifying the local go.{mod,sum}.
# See https://github.com/golang/go/issues/27643.
# For now, we are using the next branch of staticcheck because master isn't compatible yet with Go modules.
tmpModule=$$(mktemp -d 2>&1) && \
mkdir -p $${tmpModule}/staticcheck && \
cd "$${tmpModule}"/staticcheck && \
GO111MODULE=on $(GO) mod init example.com/staticcheck && \
GO111MODULE=on GOOS= GOARCH= $(GO) get -u honnef.co/go/tools/cmd/staticcheck@next && \
rm -rf $${tmpModule};
else
GOOS= GOARCH= GO111MODULE=off $(GO) get -u honnef.co/go/tools/cmd/staticcheck
endif
mkdir -p $(FIRST_GOPATH)/bin
curl -s -L $(STATICCHECK_URL) > $(STATICCHECK)
ifdef GOVENDOR
.PHONY: $(GOVENDOR)

@ -53,16 +53,6 @@ func newBReader(b []byte) bstream {
return bstream{stream: b, count: 8}
}
func newBWriter(size int) *bstream {
return &bstream{stream: make([]byte, 0, size), count: 0}
}
func (b *bstream) clone() *bstream {
d := make([]byte, len(b.stream))
copy(d, b.stream)
return &bstream{stream: d, count: b.count}
}
func (b *bstream) bytes() []byte {
return b.stream
}

@ -32,7 +32,7 @@ func TestChunk(t *testing.T) {
for enc, nc := range map[Encoding]func() Chunk{
EncXOR: func() Chunk { return NewXORChunk() },
} {
t.Run(fmt.Sprintf("%s", enc), func(t *testing.T) {
t.Run(fmt.Sprintf("%v", enc), func(t *testing.T) {
for range make([]struct{}, 1) {
c := nc()
if err := testChunk(c); err != nil {

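The only change in this test is the format verb. For a value whose type defines a String method, %s and %v print the same thing, so the sub-test names stay identical while the static checker that flagged the original Sprintf call is satisfied (presumably one of staticcheck's simplification checks). A stand-alone sketch of that equivalence, using a made-up Encoding stand-in rather than the real chunkenc type:

package main

import "fmt"

// Encoding is a stand-in for a small enum-like type with a String method.
type Encoding uint8

const EncXOR Encoding = 1

func (e Encoding) String() string {
	if e == EncXOR {
		return "XOR"
	}
	return "<unknown>"
}

func main() {
	// Both verbs invoke the Stringer, so the output is identical: "XOR XOR".
	fmt.Println(fmt.Sprintf("%s", EncXOR), fmt.Sprintf("%v", EncXOR))
}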
@ -64,9 +64,7 @@ func (cm *Meta) OverlapsClosedInterval(mint, maxt int64) bool {
}
var (
errInvalidSize = fmt.Errorf("invalid size")
errInvalidFlag = fmt.Errorf("invalid flag")
errInvalidChecksum = fmt.Errorf("invalid checksum")
errInvalidSize = fmt.Errorf("invalid size")
)
var castagnoliTable *crc32.Table

@ -87,7 +87,7 @@ func main() {
block = blocks[len(blocks)-1]
}
if block == nil {
exitWithError(fmt.Errorf("Block not found"))
exitWithError(fmt.Errorf("block not found"))
}
analyzeBlock(block, *analyzeLimit)
}
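Lower-casing the message follows the usual Go convention for error strings, enforced by common linters (golint, staticcheck's ST1005): errors are routinely wrapped into longer messages, so they should not start with a capital letter or end with punctuation. A brief illustration with a made-up errBlockNotFound value:

package main

import (
	"errors"
	"fmt"
)

var errBlockNotFound = errors.New("block not found")

func main() {
	// Wrapped errors read as one sentence when the inner message is
	// lower-case: "analyze block: block not found".
	err := fmt.Errorf("analyze block: %v", errBlockNotFound)
	fmt.Println(err)
}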
@ -340,12 +340,6 @@ func measureTime(stage string, f func()) time.Duration {
return time.Since(start)
}
func mapToLabels(m map[string]interface{}, l *labels.Labels) {
for k, v := range m {
*l = append(*l, labels.Label{Name: k, Value: v.(string)})
}
}
func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
scanner := bufio.NewScanner(r)

@ -71,7 +71,6 @@ type Compactor interface {
// LeveledCompactor implements the Compactor interface.
type LeveledCompactor struct {
dir string
metrics *compactorMetrics
logger log.Logger
ranges []int64

@ -518,8 +518,6 @@ func TestDB_e2e(t *testing.T) {
numDatapoints = 1000
numRanges = 1000
timeInterval = int64(3)
maxTime = int64(2 * 1000)
minTime = int64(200)
)
// Create 8 series with 1000 data-points of different ranges and run queries.
lbls := [][]labels.Label{
@ -670,8 +668,6 @@ func TestDB_e2e(t *testing.T) {
q.Close()
}
}
return
}
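Several of these test files lose a bare return at the end of a function that has no result parameters; the statement does nothing, and staticcheck reports it as S1023 (redundant control flow). The same cleanup recurs in the tombstones, postings and querier tests further down. A minimal before/after sketch with made-up function names:

package example

import "testing"

// Before: the trailing return is redundant and gets flagged by S1023.
func testBefore(t *testing.T) {
	t.Log("done")
	return
}

// After: the function simply falls off the end.
func testAfter(t *testing.T) {
	t.Log("done")
}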
func TestWALFlushedOnDBClose(t *testing.T) {

@ -15,8 +15,6 @@ package tsdb
import (
"encoding/binary"
"hash"
"hash/crc32"
"unsafe"
"github.com/pkg/errors"
@ -32,17 +30,12 @@ type encbuf struct {
func (e *encbuf) reset() { e.b = e.b[:0] }
func (e *encbuf) get() []byte { return e.b }
func (e *encbuf) len() int { return len(e.b) }
func (e *encbuf) putString(s string) { e.b = append(e.b, s...) }
func (e *encbuf) putBytes(b []byte) { e.b = append(e.b, b...) }
func (e *encbuf) putByte(c byte) { e.b = append(e.b, c) }
func (e *encbuf) putBE32int(x int) { e.putBE32(uint32(x)) }
func (e *encbuf) putBE64int(x int) { e.putBE64(uint64(x)) }
func (e *encbuf) putBE64int64(x int64) { e.putBE64(uint64(x)) }
func (e *encbuf) putUvarint32(x uint32) { e.putUvarint64(uint64(x)) }
func (e *encbuf) putUvarint(x int) { e.putUvarint64(uint64(x)) }
func (e *encbuf) putBE64int64(x int64) { e.putBE64(uint64(x)) }
func (e *encbuf) putUvarint(x int) { e.putUvarint64(uint64(x)) }
func (e *encbuf) putBE32(x uint32) {
binary.BigEndian.PutUint32(e.c[:], x)
@ -71,16 +64,6 @@ func (e *encbuf) putUvarintStr(s string) {
e.putString(s)
}
// putHash appends a hash over the buffers current contents to the buffer.
func (e *encbuf) putHash(h hash.Hash) {
h.Reset()
_, err := h.Write(e.b)
if err != nil {
panic(err) // The CRC32 implementation does not error
}
e.b = h.Sum(e.b)
}
// decbuf provides safe methods to extract data from a byte slice. It does all
// necessary bounds checking and advancing of the byte slice.
// Several datums can be extracted without checking for errors. However, before using
@ -90,15 +73,8 @@ type decbuf struct {
e error
}
func (d *decbuf) uvarint() int { return int(d.uvarint64()) }
func (d *decbuf) uvarint32() uint32 { return uint32(d.uvarint64()) }
func (d *decbuf) be32int() int { return int(d.be32()) }
func (d *decbuf) be64int64() int64 { return int64(d.be64()) }
// crc32 returns a CRC32 checksum over the remaining bytes.
func (d *decbuf) crc32() uint32 {
return crc32.Checksum(d.b, castagnoliTable)
}
func (d *decbuf) uvarint() int { return int(d.uvarint64()) }
func (d *decbuf) be64int64() int64 { return int64(d.be64()) }
func (d *decbuf) uvarintStr() string {
l := d.uvarint64()
@ -179,18 +155,6 @@ func (d *decbuf) byte() byte {
return x
}
func (d *decbuf) decbuf(l int) decbuf {
if d.e != nil {
return decbuf{e: d.e}
}
if l > len(d.b) {
return decbuf{e: errInvalidSize}
}
r := decbuf{b: d.b[:l]}
d.b = d.b[l:]
return r
}
func (d *decbuf) err() error { return d.e }
func (d *decbuf) len() int { return len(d.b) }
func (d *decbuf) get() []byte { return d.b }
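The decbuf comment above describes the pattern these helpers implement: every accessor first checks a stored error and records one on failure, so callers can chain several reads and inspect err() once at the end. A pared-down, self-contained sketch of that pattern, distilled from the helpers in this diff (readPair is a made-up caller):

package example

import "errors"

var errInvalidSize = errors.New("invalid size")

// decbuf is a pared-down sticky-error decode buffer: once e is set, all
// further reads become no-ops that return zero values.
type decbuf struct {
	b []byte
	e error
}

func (d *decbuf) byte() byte {
	if d.e != nil {
		return 0
	}
	if len(d.b) < 1 {
		d.e = errInvalidSize
		return 0
	}
	x := d.b[0]
	d.b = d.b[1:]
	return x
}

func (d *decbuf) err() error { return d.e }

// readPair chains two reads and checks the error only once.
func readPair(b []byte) (byte, byte, error) {
	d := decbuf{b: b}
	first, second := d.byte(), d.byte()
	return first, second, d.err()
}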

@ -77,9 +77,8 @@ func copyFile(src, dest string) error {
// returns relative paths to all files and empty directories.
func readDirs(src string) ([]string, error) {
var files []string
var err error
err = filepath.Walk(src, func(path string, f os.FileInfo, err error) error {
err := filepath.Walk(src, func(path string, f os.FileInfo, err error) error {
relativePath := strings.TrimPrefix(path, src)
if len(relativePath) > 0 {
files = append(files, relativePath)

@ -1628,11 +1628,6 @@ func (ss stringset) set(s string) {
ss[s] = struct{}{}
}
func (ss stringset) has(s string) bool {
_, ok := ss[s]
return ok
}
func (ss stringset) String() string {
return strings.Join(ss.slice(), ",")
}

@ -585,7 +585,6 @@ func TestDelete_e2e(t *testing.T) {
}
}
}
return
}
func boundedSamples(full []sample, mint, maxt int64) []sample {

@ -33,12 +33,9 @@ func (e *encbuf) get() []byte { return e.b }
func (e *encbuf) len() int { return len(e.b) }
func (e *encbuf) putString(s string) { e.b = append(e.b, s...) }
func (e *encbuf) putBytes(b []byte) { e.b = append(e.b, b...) }
func (e *encbuf) putByte(c byte) { e.b = append(e.b, c) }
func (e *encbuf) putBE32int(x int) { e.putBE32(uint32(x)) }
func (e *encbuf) putBE64int(x int) { e.putBE64(uint64(x)) }
func (e *encbuf) putBE64int64(x int64) { e.putBE64(uint64(x)) }
func (e *encbuf) putUvarint32(x uint32) { e.putUvarint64(uint64(x)) }
func (e *encbuf) putUvarint(x int) { e.putUvarint64(uint64(x)) }
@ -142,10 +139,8 @@ func newDecbufUvarintAt(bs ByteSlice, off int) decbuf {
return dec
}
func (d *decbuf) uvarint() int { return int(d.uvarint64()) }
func (d *decbuf) uvarint32() uint32 { return uint32(d.uvarint64()) }
func (d *decbuf) be32int() int { return int(d.be32()) }
func (d *decbuf) be64int64() int64 { return int64(d.be64()) }
func (d *decbuf) uvarint() int { return int(d.uvarint64()) }
func (d *decbuf) be32int() int { return int(d.be32()) }
// crc32 returns a CRC32 checksum over the remaining bytes.
func (d *decbuf) crc32() uint32 {
@ -218,31 +213,6 @@ func (d *decbuf) be32() uint32 {
return x
}
func (d *decbuf) byte() byte {
if d.e != nil {
return 0
}
if len(d.b) < 1 {
d.e = errInvalidSize
return 0
}
x := d.b[0]
d.b = d.b[1:]
return x
}
func (d *decbuf) decbuf(l int) decbuf {
if d.e != nil {
return decbuf{e: d.e}
}
if l > len(d.b) {
return decbuf{e: errInvalidSize}
}
r := decbuf{b: d.b[:l]}
d.b = d.b[l:]
return r
}
func (d *decbuf) err() error { return d.e }
func (d *decbuf) len() int { return len(d.b) }
func (d *decbuf) get() []byte { return d.b }
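This file keeps its crc32 helper (the copy in the root package is dropped as unused above); it checksums whatever bytes remain in the buffer against the package's Castagnoli table. A minimal sketch of the same computation using only the standard library (the checksum function name is made up):

package example

import "hash/crc32"

// castagnoliTable mirrors the package-level table the encoding helpers use.
var castagnoliTable = crc32.MakeTable(crc32.Castagnoli)

// checksum returns the CRC32 (Castagnoli polynomial) of the remaining
// bytes, the same computation decbuf.crc32 performs on its slice.
func checksum(remaining []byte) uint32 {
	return crc32.Checksum(remaining, castagnoliTable)
}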

@ -752,7 +752,7 @@ func ReadSymbols(bs ByteSlice, version int, off int) ([]string, map[uint32]strin
symbolSlice []string
symbols = map[uint32]string{}
)
if version == 2 {
if version == FormatV2 {
symbolSlice = make([]string, 0, cnt)
}

@ -305,9 +305,8 @@ func Intersect(its ...Postings) Postings {
}
type intersectPostings struct {
a, b Postings
aok, bok bool
cur uint64
a, b Postings
cur uint64
}
func newIntersectPostings(a, b Postings) *intersectPostings {

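Dropping the cached aok and bok flags leaves intersectPostings with just the two child iterators and the current value. The underlying idea is a two-pointer intersection of sorted ID streams; as a rough stand-alone illustration (intersectSorted is made up, not the actual iterator implementation):

package example

// intersectSorted returns the IDs present in both ascending slices by
// advancing whichever side is behind - the same idea intersectPostings
// applies incrementally through its Next/Seek methods.
func intersectSorted(a, b []uint64) []uint64 {
	var out []uint64
	i, j := 0, 0
	for i < len(a) && j < len(b) {
		switch {
		case a[i] < b[j]:
			i++
		case a[i] > b[j]:
			j++
		default:
			out = append(out, a[i])
			i++
			j++
		}
	}
	return out
}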
@ -61,18 +61,6 @@ func TestMemPostings_ensureOrder(t *testing.T) {
}
}
type mockPostings struct {
next func() bool
seek func(uint64) bool
value func() uint64
err func() error
}
func (m *mockPostings) Next() bool { return m.next() }
func (m *mockPostings) Seek(v uint64) bool { return m.seek(v) }
func (m *mockPostings) Value() uint64 { return m.value() }
func (m *mockPostings) Err() error { return m.err() }
func TestIntersect(t *testing.T) {
var cases = []struct {
a, b []uint64
@ -300,8 +288,6 @@ func TestMergedPostingsSeek(t *testing.T) {
testutil.Equals(t, c.res, lst)
}
}
return
}
func TestRemovedPostings(t *testing.T) {
@ -463,8 +449,6 @@ func TestRemovedPostingsSeek(t *testing.T) {
testutil.Equals(t, c.res, lst)
}
}
return
}
func TestBigEndian(t *testing.T) {

@ -56,18 +56,6 @@ func newMockSeriesSet(list []Series) *mockSeriesSet {
}
}
type mockSeriesIterator struct {
seek func(int64) bool
at func() (int64, float64)
next func() bool
err func() error
}
func (m *mockSeriesIterator) Seek(t int64) bool { return m.seek(t) }
func (m *mockSeriesIterator) At() (int64, float64) { return m.at() }
func (m *mockSeriesIterator) Next() bool { return m.next() }
func (m *mockSeriesIterator) Err() error { return m.err() }
func TestMergedSeriesSet(t *testing.T) {
cases := []struct {
@ -399,8 +387,6 @@ Outer:
testutil.Equals(t, smplExp, smplRes)
}
}
return
}
func TestBlockQuerierDelete(t *testing.T) {
@ -563,8 +549,6 @@ Outer:
testutil.Equals(t, smplExp, smplRes)
}
}
return
}
func TestBaseChunkSeries(t *testing.T) {
@ -661,8 +645,6 @@ func TestBaseChunkSeries(t *testing.T) {
testutil.Equals(t, len(tc.expIdxs), i)
testutil.Ok(t, bcs.Err())
}
return
}
// TODO: Remove after simpleSeries is merged
@ -986,8 +968,6 @@ func TestSeriesIterator(t *testing.T) {
}
})
})
return
}
// Regression for: https://github.com/prometheus/tsdb/pull/97
@ -1088,7 +1068,6 @@ func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
maxt: 15,
}
testutil.Assert(t, p.Next() == false, "")
return
}
type mockChunkSeriesSet struct {

@ -1,2 +0,0 @@
# Enable only "legacy" staticcheck verifications.
checks = [ "SA*" ]

@ -120,7 +120,6 @@ func TestAddingNewIntervals(t *testing.T) {
testutil.Equals(t, c.exp, c.exist.add(c.new))
}
return
}
// TestMemTombstonesConcurrency to make sure they are safe to access from different goroutines.

@ -226,7 +226,6 @@ func TestReader_Live(t *testing.T) {
func TestWAL_FuzzWriteRead_Live(t *testing.T) {
const count = 5000
const segmentSize = int64(128 * 1024 * 1204)
var input [][]byte
lock := sync.RWMutex{}
var recs [][]byte