rebased and resolved conflicts with the new Discovery GUI page

Signed-off-by: Krasi Georgiev <krasi.root@gmail.com>
pull/3362/head
Krasi Georgiev 7 years ago
parent 60ef2016d5
commit 587dec9eb9

@@ -238,11 +238,11 @@ func main() {
 	discoveryManager = discovery.NewManager(log.With(logger, "component", "discovery manager"))
 	scrapeManager = retrieval.NewScrapeManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
 	queryEngine = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
-	ruleManager := rules.NewManager(&rules.ManagerOptions{
+	ruleManager = rules.NewManager(&rules.ManagerOptions{
 		Appendable:  fanoutStorage,
 		QueryFunc:   rules.EngineQueryFunc(queryEngine),
 		NotifyFunc:  sendAlerts(notifier, cfg.web.ExternalURL.String()),
-		Context:     ctx,
+		Context:     ctxRule,
 		ExternalURL: cfg.web.ExternalURL,
 		Registerer:  prometheus.DefaultRegisterer,
 		Logger:      log.With(logger, "component", "rule manager"),
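Worth noting in this hunk: ruleManager switches from `:=` to `=` (it is now declared earlier in main alongside the other managers), and the rule manager gets its own context, ctxRule, instead of the shared ctx. A minimal sketch of that pattern, assuming ctxRule comes from context.WithCancel; only the ctxRule name appears in the hunk, cancelRule is hypothetical:

    package main

    import (
    	"context"
    	"fmt"
    	"time"
    )

    func main() {
    	// ctxRule gives the rule manager its own cancelable context, so
    	// stopping rule evaluation doesn't tear down anything else in main.
    	ctxRule, cancelRule := context.WithCancel(context.Background())

    	go func() {
    		<-ctxRule.Done()
    		fmt.Println("rule manager context done:", ctxRule.Err())
    	}()

    	cancelRule()
    	time.Sleep(10 * time.Millisecond) // give the goroutine time to report
    }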
@@ -271,7 +271,7 @@ func main() {
 		cfg.web.Flags[f.Name] = f.Value.String()
 	}

-	// Depend on cfg.web.ScrapeManager so needs to be after cfg.web.ScrapeManager = scrapeManager
+	// Depends on cfg.web.ScrapeManager so needs to be after cfg.web.ScrapeManager = scrapeManager
 	webHandler := web.New(log.With(logger, "component", "web"), &cfg.web)

 	// Monitor outgoing connections on default transport with conntrack.
@@ -281,9 +281,9 @@ func main() {
 	reloaders := []func(cfg *config.Config) error{
 		remoteStorage.ApplyConfig,
-		discoveryManager.ApplyConfig,
 		webHandler.ApplyConfig,
 		notifier.ApplyConfig,
+		discoveryManager.ApplyConfig,
 		scrapeManager.ApplyConfig,
 		func(cfg *config.Config) error {
 			// Get all rule files matching the configuration oaths.
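The reloaders slice is the mechanism behind this hunk: every component exposes an ApplyConfig-style hook, and on each configuration reload the hooks run strictly in slice order, which is why moving discoveryManager.ApplyConfig below notifier.ApplyConfig matters. A minimal sketch of that loop, with a stand-in Config type and illustrative hook bodies:

    package main

    import "fmt"

    // Config stands in for Prometheus' *config.Config.
    type Config struct{ JobName string }

    func main() {
    	cfg := &Config{JobName: "prometheus"}

    	// Each component contributes one ApplyConfig-style hook; order matters.
    	reloaders := []func(cfg *Config) error{
    		func(cfg *Config) error { fmt.Println("web reloaded"); return nil },
    		func(cfg *Config) error { fmt.Println("notifier reloaded"); return nil },
    		func(cfg *Config) error { fmt.Println("discovery reloaded"); return nil },
    		func(cfg *Config) error { fmt.Println("scrape reloaded"); return nil },
    	}

    	for _, reload := range reloaders {
    		if err := reload(cfg); err != nil {
    			fmt.Println("failed to apply configuration:", err)
    		}
    	}
    }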

@@ -53,7 +53,7 @@ type Discoverer interface {
 }

 type poolKey struct {
-	set      string
+	setName  string
 	provider string
 }
@@ -111,7 +111,7 @@ func (m *Manager) ApplyConfig(cfg *config.Config) error {
 		m.cancelDiscoverers()
 		for _, scfg := range cfg.ScrapeConfigs {
 			for provName, prov := range m.providersFromConfig(scfg.ServiceDiscoveryConfig) {
-				m.startProvider(ctx, poolKey{set: scfg.JobName, provider: provName}, prov)
+				m.startProvider(ctx, poolKey{setName: scfg.JobName, provider: provName}, prov)
 			}
 		}
 		close(err)
@@ -184,7 +184,7 @@ func (m *Manager) allGroups(pk poolKey) map[string][]*config.TargetGroup {
 	for _, pk := range pKeys {
 		for _, tg := range m.targets[pk] {
 			if tg.Source != "" { // Don't add empty targets.
-				tSetsAll[pk.set] = append(tSetsAll[pk.set], tg)
+				tSetsAll[pk.setName] = append(tSetsAll[pk.setName], tg)
 			}
 		}
 	}
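After the rename, allGroups merges the target groups discovered by every provider into per-setName (per-job) slices, skipping groups with an empty Source since an empty group marks a deletion. A self-contained sketch of that aggregation, with targetGroup standing in for *config.TargetGroup:

    package main

    import "fmt"

    // poolKey mirrors the renamed struct above: targets are indexed by the
    // scrape set (job) name plus the provider that discovered them.
    type poolKey struct{ setName, provider string }

    type targetGroup struct{ Source string }

    func main() {
    	targets := map[poolKey][]targetGroup{
    		{setName: "prometheus", provider: "static/0"}: {{Source: "static/0/0"}},
    		{setName: "prometheus", provider: "dns/0"}:    {{Source: ""}}, // deleted group
    	}

    	tSetsAll := map[string][]targetGroup{}
    	for pk, tgs := range targets {
    		for _, tg := range tgs {
    			if tg.Source != "" { // don't add empty targets
    				tSetsAll[pk.setName] = append(tSetsAll[pk.setName], tg)
    			}
    		}
    	}
    	fmt.Println(tSetsAll) // map[prometheus:[{static/0/0}]]
    }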

@@ -590,7 +590,7 @@ func TestDiscoveryManagerSyncCalls(t *testing.T) {
 			var totalUpdatesCount int
 			for tpName, update := range testCase.updates {
 				provider := newMockDiscoveryProvider(update)
-				discoveryManager.startProvider(ctx, poolKey{set: strconv.Itoa(testIndex), provider: tpName}, provider)
+				discoveryManager.startProvider(ctx, poolKey{setName: strconv.Itoa(testIndex), provider: tpName}, provider)

 				if len(update) > 0 {
 					totalUpdatesCount = totalUpdatesCount + len(update)
@@ -674,8 +674,8 @@ scrape_configs:
 	discoveryManager.ApplyConfig(cfg)

 	_ = <-discoveryManager.SyncCh()
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", true)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", true)

 	sTwo := `
 scrape_configs:
@@ -689,8 +689,8 @@ scrape_configs:
 	discoveryManager.ApplyConfig(cfg)

 	_ = <-discoveryManager.SyncCh()
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", false)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", false)
 }

 type update struct {

@@ -92,20 +92,21 @@ func (m *ScrapeManager) ApplyConfig(cfg *config.Config) error {
 }

 // TargetMap returns map of active and dropped targets and their corresponding scrape config job name.
-func (tm *TargetManager) TargetMap() map[string][]*Target {
-	tm.mtx.RLock()
-	defer tm.mtx.RUnlock()
-
-	targetsMap := make(map[string][]*Target)
-	for jobName, ps := range tm.targetSets {
-		ps.sp.mtx.RLock()
-		for _, t := range ps.sp.targets {
-			targetsMap[jobName] = append(targetsMap[jobName], t)
+func (m *ScrapeManager) TargetMap() map[string][]*Target {
+	targetsMap := make(chan map[string][]*Target)
+	m.actionCh <- func() {
+		targets := make(map[string][]*Target)
+		for jobName, sp := range m.scrapePools {
+			sp.mtx.RLock()
+			for _, t := range sp.targets {
+				targets[jobName] = append(targets[jobName], t)
+			}
+			targets[jobName] = append(targets[jobName], sp.droppedTargets...)
+			sp.mtx.RUnlock()
 		}
-		targetsMap[jobName] = append(targetsMap[jobName], ps.sp.droppedTargets...)
-		ps.sp.mtx.RUnlock()
+		targetsMap <- targets
 	}
-	return targetsMap
+	return <-targetsMap
 }

 // Targets returns the targets currently being scraped.
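This is the largest behavioral change in the commit: TargetMap no longer locks a TargetManager mutex but ships a closure into the ScrapeManager's action channel, where a single run loop executes requests one at a time and answers over a reply channel. A minimal, runnable sketch of that pattern; manager, run, and actionCh here are illustrative stand-ins, not the real ScrapeManager API:

    package main

    import "fmt"

    type manager struct {
    	actionCh    chan func()
    	scrapePools map[string][]string // jobName -> target addresses
    }

    func (m *manager) run() {
    	for fn := range m.actionCh {
    		fn() // serialized: only the run loop touches scrapePools
    	}
    }

    func (m *manager) TargetMap() map[string][]string {
    	out := make(chan map[string][]string)
    	m.actionCh <- func() {
    		targets := make(map[string][]string)
    		for job, addrs := range m.scrapePools {
    			targets[job] = append(targets[job], addrs...)
    		}
    		out <- targets
    	}
    	return <-out // blocks until the run loop has built the snapshot
    }

    func main() {
    	m := &manager{
    		actionCh:    make(chan func()),
    		scrapePools: map[string][]string{"prometheus": {"foo:9090", "bar:9090"}},
    	}
    	go m.run()
    	fmt.Println(m.TargetMap())
    }

Because only the run loop mutates or reads scrapePools, callers never need a manager-level mutex; the per-pool sp.mtx locking in the real code guards each pool's own target slice.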

@@ -587,7 +587,7 @@ func (h *Handler) rules(w http.ResponseWriter, r *http.Request) {
 func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
 	var index []string
-	targets := h.targetManager.TargetMap()
+	targets := h.scrapeManager.TargetMap()
 	for job := range targets {
 		index = append(index, job)
 	}
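Since the handler builds its job index by ranging over the keys of TargetMap's result, and Go randomizes map iteration order, the index presumably gets sorted before rendering (the sort is not shown in this hunk). A small sketch of that shape, with address strings standing in for *Target values:

    package main

    import (
    	"fmt"
    	"sort"
    )

    func main() {
    	// Stand-in for the map returned by scrapeManager.TargetMap().
    	targets := map[string][]string{
    		"node":       {"bar:9100"},
    		"prometheus": {"foo:9090"},
    	}

    	// Range over the keys as the handler does, then sort for a stable
    	// page order (an assumption about the surrounding code, not shown above).
    	var index []string
    	for job := range targets {
    		index = append(index, job)
    	}
    	sort.Strings(index)
    	fmt.Println(index) // [node prometheus]
    }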
