mirror of https://github.com/prometheus/prometheus
This removes the dependency on the C LevelDB and Snappy libraries. It also leaves fewer dependencies to take care of, as they would not have worked on any non-Debian, non-Brew system anyway.
Change-Id: Ia70dce1ba8a816a003587927e0b3a3f8ad2fd28c
pull/413/head
Bjoern Rabenstein
10 years ago
25 changed files with 52 additions and 1003 deletions
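With the cgo bindings gone, LevelDB and Snappy access has to come from a pure-Go implementation. The snippet below is an editor's sketch for context only — it assumes the github.com/syndtr/goleveldb library and is not code introduced by this commit — showing that such a store needs no C toolchain at all:

// Illustrative sketch only: a pure-Go LevelDB library standing in for the
// cgo-based bindings removed by this commit. Assumes
// github.com/syndtr/goleveldb; the replacement actually used may differ.
package main

import (
	"fmt"
	"log"

	"github.com/syndtr/goleveldb/leveldb"
)

func main() {
	// OpenFile creates or opens an on-disk database without any C dependency.
	db, err := leveldb.OpenFile("/tmp/example-db", nil)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Write and read back a key; block compression is handled by a pure-Go
	// Snappy package pulled in by the library.
	if err := db.Put([]byte("fingerprint"), []byte("value"), nil); err != nil {
		log.Fatal(err)
	}
	got, err := db.Get([]byte("fingerprint"), nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s\n", got)
}

Building a program like this needs nothing beyond fetching the assumed Go library, which is exactly the simplification the commit message is after.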
@@ -1,48 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.SUFFIXES:

include ../../Makefile.INCLUDE

all: populate

populate: leveldb-$(LEVELDB_VERSION).tar.gz protobuf-$(PROTOCOL_BUFFERS_VERSION).tar.bz2 snappy-$(SNAPPY_VERSION).tar.gz

leveldb-$(LEVELDB_VERSION).tar.gz: wget-stamp
	$(WGET) http://leveldb.googlecode.com/files/leveldb-$(LEVELDB_VERSION).tar.gz

protobuf-$(PROTOCOL_BUFFERS_VERSION).tar.bz2: wget-stamp
	$(WGET) http://protobuf.googlecode.com/files/$@

snappy-$(SNAPPY_VERSION).tar.gz: wget-stamp
	$(WGET) http://snappy.googlecode.com/files/snappy-$(SNAPPY_VERSION).tar.gz

wget-implementation-Darwin-stamp:
	[ -x "$$(which wget)" ] || $(BREW_INSTALL) wget
	touch $@

wget-implementation-Linux-stamp:
	[ -x "$$(which wget)" ] || $(APT_GET_INSTALL) wget
	touch $@

wget-stamp: wget-implementation-$(UNAME)-stamp
	[ -x "$$(which wget)" ] || { echo "wget not found." ; false ; }
	touch $@

clean:
	-[ -n "$(REALLY_CLEAN)" ] && rm -rf *.bz2
	-[ -n "$(REALLY_CLEAN)" ] && rm -rf *.gz
	rm -rf *-stamp

.PHONY: clean populate
@@ -1,22 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.SUFFIXES:

include ../../Makefile.INCLUDE

all:

clean:
	rm -rf *
	git checkout .
@@ -1,22 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.SUFFIXES:

include ../../Makefile.INCLUDE

all:

clean:
	rm -rf *
	git checkout .
@@ -1,29 +0,0 @@
#!/usr/bin/env bash

# If either of the two tests below fail, you may need to install GNU coreutils
# in your environment.

if [ ! -x "$(which readlink)" ]; then
  echo "readlink tool cannot be found." > /dev/stderr
  exit 1
fi

if [ ! -x "$(which dirname)" ]; then
  echo "dirname tool cannot be found." > /dev/stderr
  exit 1
fi

readonly binary="${0}"
readonly binary_path="$(readlink -f ${binary})"
readonly binary_directory="$(dirname ${binary_path})"

readonly platform=$(uname | tr '[:upper:]' '[:lower:]')

export LD_LIBRARY_PATH="${binary_directory}/lib:${LD_LIBRARY_PATH}"

if [[ "${platform}" == "darwin" ]]; then
  export DYLD_LIBRARY_PATH="${binary_directory}/lib:${DYLD_LIBRARY_PATH}"
fi

exec "${binary_directory}/prometheus" "${@}"
@@ -1,31 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

all: generated/data.pb.go generated/descriptor.blob

SUFFIXES:

include ../Makefile.INCLUDE

# In order to build the generated targets in this directory, run the
# following:
#
# make -C .build goprotobuf-protoc-gen-go-stamp

generated/data.pb.go: data.proto
	$(MAKE) -C ../.build goprotobuf-protoc-gen-go-stamp
	$(PROTOC) --proto_path=$(PREFIX)/include:. --include_imports --go_out=generated/ --descriptor_set_out=generated/descriptor.blob data.proto

generated/descriptor.blob: data.proto
	$(MAKE) -C ../.build goprotobuf-protoc-gen-go-stamp
	$(PROTOC) --proto_path=$(PREFIX)/include:. --include_imports --go_out=generated/ --descriptor_set_out=generated/descriptor.blob data.proto
@@ -1,125 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package io.prometheus;

import "google/protobuf/descriptor.proto";

message LabelPair {
  optional string name = 1;
  optional string value = 2;
}

message LabelName {
  optional string name = 1;
}

message LabelValueCollection {
  repeated string member = 1;
}

message Metric {
  repeated LabelPair label_pair = 1;
}

message Fingerprint {
  optional string signature = 1;
}

message FingerprintCollection {
  repeated Fingerprint member = 1;
}

message LabelSet {
  repeated LabelPair member = 1;
}

// The default LevelDB comparator sorts not only lexicographically, but also by
// key length (which takes precedence). Thus, no variable-length fields may be
// introduced into the key definition below.
message SampleKey {
  optional Fingerprint fingerprint = 1;
  optional bytes timestamp = 2;
  optional sfixed64 last_timestamp = 3;
  optional fixed32 sample_count = 4;
}

message MembershipIndexValue {
}

message MetricHighWatermark {
  optional int64 timestamp = 1;
}

// CompactionProcessorDefinition models a curation process across the sample
// corpus that ensures that sparse samples are compacted together into larger
// chunks.
message CompactionProcessorDefinition {
  // minimum_group_size identifies how minimally samples should be grouped
  // together to write a new samples chunk.
  optional uint32 minimum_group_size = 1;
}

// CurationKey models the state of curation for a given metric fingerprint and
// its associated samples. The time series database only knows about compaction
// and resampling behaviors that are explicitly defined to it in its runtime
// configuration, meaning it never scans on-disk tables for CurationKey
// policies; rather, it looks up via the CurationKey tuple to find out what the
// effectuation state for a given metric fingerprint is.
//
// For instance, how far along a rule for (Fingerprint A, Samples Older Than
// B, and Curation Processor) has been effectuated on-disk.
message CurationKey {
  // fingerprint identifies the fingerprint for the given policy.
  optional Fingerprint fingerprint = 1;

  // processor_message_type_name identifies the underlying message type that
  // was used to encode processor_message_raw.
  optional string processor_message_type_name = 2;

  // processor_message_raw identifies the serialized ProcessorSignature for this
  // operation.
  optional bytes processor_message_raw = 3;

  // ignore_younger_than represents, in seconds relative to when the curation
  // cycle starts, when the curator should stop operating. For instance, if
  // the curation cycle starts at time T and the curation remark dictates that
  // the curation should start processing samples at time S, the curator should
  // work from S until ignore_younger_than seconds before T:
  //
  //   PAST                 NOW                FUTURE
  //
  //   S--------------->|----------T
  //                    |---IYT----|
  //
  //   [Curation Resumption Time (S), T - IYT)
  optional int64 ignore_younger_than = 4;

  // This could be populated by decoding the generated descriptor file into a
  // FileDescriptorSet message and extracting the type definition for the given
  // message schema that describes processor_message_type_name.
  //
  // optional google.protobuf.DescriptorProto processor_message_type_descriptor_raw = 5;
}

// CurationValue models the progress for a given CurationKey.
message CurationValue {
  // last_completion_timestamp represents the seconds since the epoch UTC at
  // which the curator last completed its duty cycle for a given metric
  // fingerprint.
  optional int64 last_completion_timestamp = 1;
}

// DeletionProcessorDefinition models a curation process across the sample
// corpus that deletes old values.
message DeletionProcessorDefinition {
}
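The SampleKey comment above explains why every key field is fixed-width. As a hypothetical illustration of why that matters (standard library only, not code from this repository): with a fixed-width, big-endian encoding, LevelDB's byte-wise comparison of keys agrees with the numeric order of the timestamps they contain.

// Hypothetical illustration only: encodes a timestamp as a fixed-width,
// big-endian value so that byte-wise key comparison (as a LevelDB comparator
// performs it) orders keys the same way as the numeric timestamps.
// Not part of the Prometheus code base.
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// encodeKey builds a fixed-width key: a fingerprint digest followed by a
// big-endian uint64 timestamp. Because every key has the same length,
// lexicographic comparison never has to break ties by length.
func encodeKey(fingerprint [16]byte, timestamp uint64) []byte {
	key := make([]byte, 0, 24)
	key = append(key, fingerprint[:]...)
	var ts [8]byte
	binary.BigEndian.PutUint64(ts[:], timestamp)
	return append(key, ts[:]...)
}

func main() {
	var fp [16]byte // fixed-size stand-in for the fingerprint
	earlier := encodeKey(fp, 1000)
	later := encodeKey(fp, 2000)
	// bytes.Compare orders the keys exactly like the numeric timestamps.
	fmt.Println(bytes.Compare(earlier, later) < 0) // true
}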
@@ -1,344 +0,0 @@
// Code generated by protoc-gen-go.
// source: data.proto
// DO NOT EDIT!

/*
Package io_prometheus is a generated protocol buffer package.

It is generated from these files:
	data.proto

It has these top-level messages:
	LabelPair
	LabelName
	LabelValueCollection
	Metric
	Fingerprint
	FingerprintCollection
	LabelSet
	SampleKey
	MembershipIndexValue
	MetricHighWatermark
	CompactionProcessorDefinition
	CurationKey
	CurationValue
	DeletionProcessorDefinition
*/
package io_prometheus

import proto "code.google.com/p/goprotobuf/proto"
import json "encoding/json"
import math "math"

// discarding unused import google_protobuf "google/protobuf/descriptor.pb"

// Reference proto, json, and math imports to suppress error if they are not otherwise used.
var _ = proto.Marshal
var _ = &json.SyntaxError{}
var _ = math.Inf

type LabelPair struct {
	Name             *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
	Value            *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *LabelPair) Reset()         { *m = LabelPair{} }
func (m *LabelPair) String() string { return proto.CompactTextString(m) }
func (*LabelPair) ProtoMessage()    {}

func (m *LabelPair) GetName() string {
	if m != nil && m.Name != nil {
		return *m.Name
	}
	return ""
}

func (m *LabelPair) GetValue() string {
	if m != nil && m.Value != nil {
		return *m.Value
	}
	return ""
}

type LabelName struct {
	Name             *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *LabelName) Reset()         { *m = LabelName{} }
func (m *LabelName) String() string { return proto.CompactTextString(m) }
func (*LabelName) ProtoMessage()    {}

func (m *LabelName) GetName() string {
	if m != nil && m.Name != nil {
		return *m.Name
	}
	return ""
}

type LabelValueCollection struct {
	Member           []string `protobuf:"bytes,1,rep,name=member" json:"member,omitempty"`
	XXX_unrecognized []byte   `json:"-"`
}

func (m *LabelValueCollection) Reset()         { *m = LabelValueCollection{} }
func (m *LabelValueCollection) String() string { return proto.CompactTextString(m) }
func (*LabelValueCollection) ProtoMessage()    {}

func (m *LabelValueCollection) GetMember() []string {
	if m != nil {
		return m.Member
	}
	return nil
}

type Metric struct {
	LabelPair        []*LabelPair `protobuf:"bytes,1,rep,name=label_pair" json:"label_pair,omitempty"`
	XXX_unrecognized []byte       `json:"-"`
}

func (m *Metric) Reset()         { *m = Metric{} }
func (m *Metric) String() string { return proto.CompactTextString(m) }
func (*Metric) ProtoMessage()    {}

func (m *Metric) GetLabelPair() []*LabelPair {
	if m != nil {
		return m.LabelPair
	}
	return nil
}

type Fingerprint struct {
	Signature        *string `protobuf:"bytes,1,opt,name=signature" json:"signature,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *Fingerprint) Reset()         { *m = Fingerprint{} }
func (m *Fingerprint) String() string { return proto.CompactTextString(m) }
func (*Fingerprint) ProtoMessage()    {}

func (m *Fingerprint) GetSignature() string {
	if m != nil && m.Signature != nil {
		return *m.Signature
	}
	return ""
}

type FingerprintCollection struct {
	Member           []*Fingerprint `protobuf:"bytes,1,rep,name=member" json:"member,omitempty"`
	XXX_unrecognized []byte         `json:"-"`
}

func (m *FingerprintCollection) Reset()         { *m = FingerprintCollection{} }
func (m *FingerprintCollection) String() string { return proto.CompactTextString(m) }
func (*FingerprintCollection) ProtoMessage()    {}

func (m *FingerprintCollection) GetMember() []*Fingerprint {
	if m != nil {
		return m.Member
	}
	return nil
}

type LabelSet struct {
	Member           []*LabelPair `protobuf:"bytes,1,rep,name=member" json:"member,omitempty"`
	XXX_unrecognized []byte       `json:"-"`
}

func (m *LabelSet) Reset()         { *m = LabelSet{} }
func (m *LabelSet) String() string { return proto.CompactTextString(m) }
func (*LabelSet) ProtoMessage()    {}

func (m *LabelSet) GetMember() []*LabelPair {
	if m != nil {
		return m.Member
	}
	return nil
}

// The default LevelDB comparator sorts not only lexicographically, but also by
// key length (which takes precedence). Thus, no variable-length fields may be
// introduced into the key definition below.
type SampleKey struct {
	Fingerprint      *Fingerprint `protobuf:"bytes,1,opt,name=fingerprint" json:"fingerprint,omitempty"`
	Timestamp        []byte       `protobuf:"bytes,2,opt,name=timestamp" json:"timestamp,omitempty"`
	LastTimestamp    *int64       `protobuf:"fixed64,3,opt,name=last_timestamp" json:"last_timestamp,omitempty"`
	SampleCount      *uint32      `protobuf:"fixed32,4,opt,name=sample_count" json:"sample_count,omitempty"`
	XXX_unrecognized []byte       `json:"-"`
}

func (m *SampleKey) Reset()         { *m = SampleKey{} }
func (m *SampleKey) String() string { return proto.CompactTextString(m) }
func (*SampleKey) ProtoMessage()    {}

func (m *SampleKey) GetFingerprint() *Fingerprint {
	if m != nil {
		return m.Fingerprint
	}
	return nil
}

func (m *SampleKey) GetTimestamp() []byte {
	if m != nil {
		return m.Timestamp
	}
	return nil
}

func (m *SampleKey) GetLastTimestamp() int64 {
	if m != nil && m.LastTimestamp != nil {
		return *m.LastTimestamp
	}
	return 0
}

func (m *SampleKey) GetSampleCount() uint32 {
	if m != nil && m.SampleCount != nil {
		return *m.SampleCount
	}
	return 0
}

type MembershipIndexValue struct {
	XXX_unrecognized []byte `json:"-"`
}

func (m *MembershipIndexValue) Reset()         { *m = MembershipIndexValue{} }
func (m *MembershipIndexValue) String() string { return proto.CompactTextString(m) }
func (*MembershipIndexValue) ProtoMessage()    {}

type MetricHighWatermark struct {
	Timestamp        *int64 `protobuf:"varint,1,opt,name=timestamp" json:"timestamp,omitempty"`
	XXX_unrecognized []byte `json:"-"`
}

func (m *MetricHighWatermark) Reset()         { *m = MetricHighWatermark{} }
func (m *MetricHighWatermark) String() string { return proto.CompactTextString(m) }
func (*MetricHighWatermark) ProtoMessage()    {}

func (m *MetricHighWatermark) GetTimestamp() int64 {
	if m != nil && m.Timestamp != nil {
		return *m.Timestamp
	}
	return 0
}

// CompactionProcessorDefinition models a curation process across the sample
// corpus that ensures that sparse samples are compacted together into larger
// chunks.
type CompactionProcessorDefinition struct {
	// minimum_group_size identifies how minimally samples should be grouped
	// together to write a new samples chunk.
	MinimumGroupSize *uint32 `protobuf:"varint,1,opt,name=minimum_group_size" json:"minimum_group_size,omitempty"`
	XXX_unrecognized []byte  `json:"-"`
}

func (m *CompactionProcessorDefinition) Reset()         { *m = CompactionProcessorDefinition{} }
func (m *CompactionProcessorDefinition) String() string { return proto.CompactTextString(m) }
func (*CompactionProcessorDefinition) ProtoMessage()    {}

func (m *CompactionProcessorDefinition) GetMinimumGroupSize() uint32 {
	if m != nil && m.MinimumGroupSize != nil {
		return *m.MinimumGroupSize
	}
	return 0
}

// CurationKey models the state of curation for a given metric fingerprint and
// its associated samples. The time series database only knows about compaction
// and resampling behaviors that are explicitly defined to it in its runtime
// configuration, meaning it never scans on-disk tables for CurationKey
// policies; rather, it looks up via the CurationKey tuple to find out what the
// effectuation state for a given metric fingerprint is.
//
// For instance, how far along a rule for (Fingerprint A, Samples Older Than
// B, and Curation Processor) has been effectuated on-disk.
type CurationKey struct {
	// fingerprint identifies the fingerprint for the given policy.
	Fingerprint *Fingerprint `protobuf:"bytes,1,opt,name=fingerprint" json:"fingerprint,omitempty"`
	// processor_message_type_name identifies the underlying message type that
	// was used to encode processor_message_raw.
	ProcessorMessageTypeName *string `protobuf:"bytes,2,opt,name=processor_message_type_name" json:"processor_message_type_name,omitempty"`
	// processor_message_raw identifies the serialized ProcessorSignature for this
	// operation.
	ProcessorMessageRaw []byte `protobuf:"bytes,3,opt,name=processor_message_raw" json:"processor_message_raw,omitempty"`
	// ignore_younger_than represents, in seconds relative to when the curation
	// cycle starts, when the curator should stop operating. For instance, if
	// the curation cycle starts at time T and the curation remark dictates that
	// the curation should start processing samples at time S, the curator should
	// work from S until ignore_younger_than seconds before T:
	//
	//   PAST                 NOW                FUTURE
	//
	//   S--------------->|----------T
	//                    |---IYT----|
	//
	//   [Curation Resumption Time (S), T - IYT)
	IgnoreYoungerThan *int64 `protobuf:"varint,4,opt,name=ignore_younger_than" json:"ignore_younger_than,omitempty"`
	XXX_unrecognized  []byte `json:"-"`
}

func (m *CurationKey) Reset()         { *m = CurationKey{} }
func (m *CurationKey) String() string { return proto.CompactTextString(m) }
func (*CurationKey) ProtoMessage()    {}

func (m *CurationKey) GetFingerprint() *Fingerprint {
	if m != nil {
		return m.Fingerprint
	}
	return nil
}

func (m *CurationKey) GetProcessorMessageTypeName() string {
	if m != nil && m.ProcessorMessageTypeName != nil {
		return *m.ProcessorMessageTypeName
	}
	return ""
}

func (m *CurationKey) GetProcessorMessageRaw() []byte {
	if m != nil {
		return m.ProcessorMessageRaw
	}
	return nil
}

func (m *CurationKey) GetIgnoreYoungerThan() int64 {
	if m != nil && m.IgnoreYoungerThan != nil {
		return *m.IgnoreYoungerThan
	}
	return 0
}

// CurationValue models the progress for a given CurationKey.
type CurationValue struct {
	// last_completion_timestamp represents the seconds since the epoch UTC at
	// which the curator last completed its duty cycle for a given metric
	// fingerprint.
	LastCompletionTimestamp *int64 `protobuf:"varint,1,opt,name=last_completion_timestamp" json:"last_completion_timestamp,omitempty"`
	XXX_unrecognized        []byte `json:"-"`
}

func (m *CurationValue) Reset()         { *m = CurationValue{} }
func (m *CurationValue) String() string { return proto.CompactTextString(m) }
func (*CurationValue) ProtoMessage()    {}

func (m *CurationValue) GetLastCompletionTimestamp() int64 {
	if m != nil && m.LastCompletionTimestamp != nil {
		return *m.LastCompletionTimestamp
	}
	return 0
}

// DeletionProcessorDefinition models a curation process across the sample
// corpus that deletes old values.
type DeletionProcessorDefinition struct {
	XXX_unrecognized []byte `json:"-"`
}

func (m *DeletionProcessorDefinition) Reset()         { *m = DeletionProcessorDefinition{} }
func (m *DeletionProcessorDefinition) String() string { return proto.CompactTextString(m) }
func (*DeletionProcessorDefinition) ProtoMessage()    {}

func init() {
}
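For context, this is roughly how callers would build and serialize one of the generated messages above with the legacy goprotobuf API that the generated file imports. It is an editor's sketch, not code from this diff; in particular, the import path of the generated package is assumed.

// Illustrative sketch only: constructs a generated message and round-trips it
// through the legacy goprotobuf API. The "dto" import path is an assumption
// about where the generated package lives, not taken from this commit.
package main

import (
	"fmt"
	"log"

	proto "code.google.com/p/goprotobuf/proto"

	dto "github.com/prometheus/prometheus/model/generated" // assumed location of the generated package
)

func main() {
	// Optional scalar fields are pointers in goprotobuf; proto.String returns *string.
	m := &dto.Metric{
		LabelPair: []*dto.LabelPair{
			{Name: proto.String("job"), Value: proto.String("api-server")},
		},
	}

	buf, err := proto.Marshal(m)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("encoded %d bytes\n", len(buf))

	out := &dto.Metric{}
	if err := proto.Unmarshal(buf, out); err != nil {
		log.Fatal(err)
	}
	// Generated getters handle nil receivers and unset fields.
	fmt.Println(out.GetLabelPair()[0].GetName(), out.GetLabelPair()[0].GetValue())
}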
Binary file not shown.
@@ -1,28 +0,0 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

MAKE_ARTIFACTS = dumper

all: dumper

SUFFIXES:

include ../../Makefile.INCLUDE

dumper: $(shell find . -iname '*.go')
	$(GO) build -o dumper .

clean:
	rm -rf $(MAKE_ARTIFACTS)

.PHONY: clean
@@ -1,105 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Dumper is responsible for dumping all samples along with metadata contained
// in a given Prometheus metrics storage. It prints samples in unquoted CSV
// format, with commas as field separators:
//
// <fingerprint>,<chunk_first_time>,<chunk_last_time>,<chunk_sample_count>,<chunk_index>,<timestamp>,<value>
package main

/*
import (
	"encoding/csv"
	"flag"
	"fmt"
	"os"
	"strconv"

	"github.com/golang/glog"

	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/storage/metric"
	"github.com/prometheus/prometheus/storage/metric/tiered"
)

var (
	storageRoot   = flag.String("storage.root", "", "The path to the storage root for Prometheus.")
	dieOnBadChunk = flag.Bool("dieOnBadChunk", false, "Whether to die upon encountering a bad chunk.")
)

type SamplesDumper struct {
	*csv.Writer
}

func (d *SamplesDumper) Operate(key, value interface{}) *storage.OperatorError {
	sampleKey := key.(*tiered.SampleKey)
	if *dieOnBadChunk && sampleKey.FirstTimestamp.After(sampleKey.LastTimestamp) {
		glog.Fatalf("Chunk: First time (%v) after last time (%v): %v\n", sampleKey.FirstTimestamp.Unix(), sampleKey.LastTimestamp.Unix(), sampleKey)
	}
	for i, sample := range value.(metric.Values) {
		if *dieOnBadChunk && (sample.Timestamp.Before(sampleKey.FirstTimestamp) || sample.Timestamp.After(sampleKey.LastTimestamp)) {
			glog.Fatalf("Sample not within chunk boundaries: chunk FirstTimestamp (%v), chunk LastTimestamp (%v) vs. sample Timestamp (%v)\n", sampleKey.FirstTimestamp.Unix(), sampleKey.LastTimestamp.Unix(), sample.Timestamp)
		}
		d.Write([]string{
			sampleKey.Fingerprint.String(),
			strconv.FormatInt(sampleKey.FirstTimestamp.Unix(), 10),
			strconv.FormatInt(sampleKey.LastTimestamp.Unix(), 10),
			strconv.FormatUint(uint64(sampleKey.SampleCount), 10),
			strconv.Itoa(i),
			strconv.FormatInt(sample.Timestamp.Unix(), 10),
			fmt.Sprintf("%v", sample.Value),
		})
		if err := d.Error(); err != nil {
			return &storage.OperatorError{
				Error:       err,
				Continuable: false,
			}
		}
	}
	return nil
}

func main() {
	flag.Parse()

	if storageRoot == nil || *storageRoot == "" {
		glog.Fatal("Must provide a path...")
	}

	persistence, err := tiered.NewLevelDBPersistence(*storageRoot)
	if err != nil {
		glog.Fatal(err)
	}
	defer persistence.Close()

	dumper := &SamplesDumper{
		csv.NewWriter(os.Stdout),
	}

	entire, err := persistence.MetricSamples.ForEach(&tiered.MetricSamplesDecoder{}, &tiered.AcceptAllFilter{}, dumper)
	if err != nil {
		glog.Fatal("Error dumping samples: ", err)
	}
	if !entire {
		glog.Fatal("Didn't scan entire corpus")
	}
	dumper.Flush()
	if err = dumper.Error(); err != nil {
		glog.Fatal("Error flushing CSV: ", err)
	}
}
*/

func main() {
}
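The doc comment above fixes the dumper's output as unquoted, comma-separated rows with seven columns. A hypothetical consumer of that output (an editor's sketch, not part of this commit) needs nothing beyond encoding/csv and the documented column layout:

// Illustrative sketch only: parses rows in the format the dumper documents,
// <fingerprint>,<chunk_first_time>,<chunk_last_time>,<chunk_sample_count>,<chunk_index>,<timestamp>,<value>.
// Hypothetical companion tool reading from standard input.
package main

import (
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"os"
	"strconv"
)

func main() {
	r := csv.NewReader(os.Stdin)
	r.FieldsPerRecord = 7 // enforce the seven-column layout documented above

	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		// Column 5 is the sample timestamp in Unix seconds; column 6 is the value.
		ts, err := strconv.ParseInt(record[5], 10, 64)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("fingerprint=%s timestamp=%d value=%s\n", record[0], ts, record[6])
	}
}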