/*
Copyright 2016 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package cmd

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
	"testing"

	"net/url"

	"k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/client-go/rest/fake"
	core "k8s.io/client-go/testing"
	"k8s.io/kubernetes/pkg/api/legacyscheme"
	cmdtesting "k8s.io/kubernetes/pkg/kubectl/cmd/testing"
	"k8s.io/kubernetes/pkg/kubectl/scheme"
	metricsv1alpha1api "k8s.io/metrics/pkg/apis/metrics/v1alpha1"
	metricsv1beta1api "k8s.io/metrics/pkg/apis/metrics/v1beta1"
	metricsfake "k8s.io/metrics/pkg/client/clientset_generated/clientset/fake"
)

const (
	apiPrefix  = "api"
	apiVersion = "v1"
)

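// TestTopNodeAllMetrics verifies that running `kubectl top node` with no
// arguments prints metrics for every node returned by the Heapster-backed
// metrics endpoint.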
func TestTopNodeAllMetrics(t *testing.T) {
	initTestErrorHandler(t)
	metrics, nodes := testNodeV1alpha1MetricsData()
	expectedMetricsPath := fmt.Sprintf("%s/%s/nodes", baseMetricsAddress, metricsApiVersion)
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m := req.URL.Path, req.Method; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbody)))}, nil
			case p == expectedMetricsPath && m == "GET":
				body, err := marshallBody(metrics)
				if err != nil {
					t.Errorf("unexpected error: %v", err)
				}
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: body}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, nodes)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\nExpected path: %#v", req, req.URL, expectedMetricsPath)
				return nil, nil
			}
		}),
	}
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	cmd := NewCmdTopNode(tf, nil, buf)
	cmd.Run(cmd, []string{})

	// Check the presence of node names in the output.
	result := buf.String()
	for _, m := range metrics.Items {
		if !strings.Contains(result, m.Name) {
			t.Errorf("missing metrics for %s: \n%s", m.Name, result)
		}
	}
}

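// TestTopNodeAllMetricsCustomDefaults verifies that the Heapster proxy
// namespace, scheme, and service name can be overridden through the
// HeapsterOptions of TopNodeOptions.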
func TestTopNodeAllMetricsCustomDefaults(t *testing.T) {
	customBaseHeapsterServiceAddress := "/api/v1/namespaces/custom-namespace/services/https:custom-heapster-service:/proxy"
	customBaseMetricsAddress := customBaseHeapsterServiceAddress + "/apis/metrics"

	initTestErrorHandler(t)
	metrics, nodes := testNodeV1alpha1MetricsData()
	expectedMetricsPath := fmt.Sprintf("%s/%s/nodes", customBaseMetricsAddress, metricsApiVersion)
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m := req.URL.Path, req.Method; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbody)))}, nil
			case p == expectedMetricsPath && m == "GET":
				body, err := marshallBody(metrics)
				if err != nil {
					t.Errorf("unexpected error: %v", err)
				}
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: body}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, nodes)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\nExpected path: %#v", req, req.URL, expectedMetricsPath)
				return nil, nil
			}
		}),
	}
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	opts := &TopNodeOptions{
		HeapsterOptions: HeapsterTopOptions{
			Namespace: "custom-namespace",
			Scheme:    "https",
			Service:   "custom-heapster-service",
		},
	}
	cmd := NewCmdTopNode(tf, opts, buf)
	cmd.Run(cmd, []string{})

	// Check the presence of node names in the output.
	result := buf.String()
	for _, m := range metrics.Items {
		if !strings.Contains(result, m.Name) {
			t.Errorf("missing metrics for %s: \n%s", m.Name, result)
		}
	}
}

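// TestTopNodeWithNameMetrics verifies that passing a node name as an argument
// limits the output to that node's metrics only.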
func TestTopNodeWithNameMetrics(t *testing.T) {
	initTestErrorHandler(t)
	metrics, nodes := testNodeV1alpha1MetricsData()
	expectedMetrics := metrics.Items[0]
	expectedNode := nodes.Items[0]
	nonExpectedMetrics := metricsv1alpha1api.NodeMetricsList{
		ListMeta: metrics.ListMeta,
		Items:    metrics.Items[1:],
	}
	expectedPath := fmt.Sprintf("%s/%s/nodes/%s", baseMetricsAddress, metricsApiVersion, expectedMetrics.Name)
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes/%s", apiPrefix, apiVersion, expectedMetrics.Name)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m := req.URL.Path, req.Method; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbody)))}, nil
			case p == expectedPath && m == "GET":
				body, err := marshallBody(expectedMetrics)
				if err != nil {
					t.Errorf("unexpected error: %v", err)
				}
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: body}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, &expectedNode)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\nExpected path: %#v", req, req.URL, expectedPath)
				return nil, nil
			}
		}),
	}
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	cmd := NewCmdTopNode(tf, nil, buf)
	cmd.Run(cmd, []string{expectedMetrics.Name})

	// Check the presence of node names in the output.
	result := buf.String()
	if !strings.Contains(result, expectedMetrics.Name) {
		t.Errorf("missing metrics for %s: \n%s", expectedMetrics.Name, result)
	}
	for _, m := range nonExpectedMetrics.Items {
		if strings.Contains(result, m.Name) {
			t.Errorf("unexpected metrics for %s: \n%s", m.Name, result)
		}
	}
}

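// TestTopNodeWithLabelSelectorMetrics verifies that --selector is forwarded as
// a labelSelector query parameter and that only matching nodes are printed.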
func TestTopNodeWithLabelSelectorMetrics(t *testing.T) {
	initTestErrorHandler(t)
	metrics, nodes := testNodeV1alpha1MetricsData()
	expectedMetrics := metricsv1alpha1api.NodeMetricsList{
		ListMeta: metrics.ListMeta,
		Items:    metrics.Items[0:1],
	}
	expectedNodes := v1.NodeList{
		ListMeta: nodes.ListMeta,
		Items:    nodes.Items[0:1],
	}
	nonExpectedMetrics := metricsv1alpha1api.NodeMetricsList{
		ListMeta: metrics.ListMeta,
		Items:    metrics.Items[1:],
	}
	label := "key=value"
	expectedPath := fmt.Sprintf("%s/%s/nodes", baseMetricsAddress, metricsApiVersion)
	expectedQuery := fmt.Sprintf("labelSelector=%s", url.QueryEscape(label))
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m, q := req.URL.Path, req.Method, req.URL.RawQuery; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbody)))}, nil
			case p == expectedPath && m == "GET" && q == expectedQuery:
				body, err := marshallBody(expectedMetrics)
				if err != nil {
					t.Errorf("unexpected error: %v", err)
				}
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: body}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, &expectedNodes)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\nExpected path: %#v", req, req.URL, expectedPath)
				return nil, nil
			}
		}),
	}
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	cmd := NewCmdTopNode(tf, nil, buf)
	cmd.Flags().Set("selector", label)
	cmd.Run(cmd, []string{})

	// Check the presence of node names in the output.
	result := buf.String()
	for _, m := range expectedMetrics.Items {
		if !strings.Contains(result, m.Name) {
			t.Errorf("missing metrics for %s: \n%s", m.Name, result)
		}
	}
	for _, m := range nonExpectedMetrics.Items {
		if strings.Contains(result, m.Name) {
			t.Errorf("unexpected metrics for %s: \n%s", m.Name, result)
		}
	}
}

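// TestTopNodeAllMetricsFromMetricsServer covers the metrics-server path: node
// metrics are listed through a fake metrics clientset wired into
// TopNodeOptions rather than fetched via the Heapster proxy.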
func TestTopNodeAllMetricsFromMetricsServer(t *testing.T) {
	initTestErrorHandler(t)
	expectedMetrics, nodes := testNodeV1beta1MetricsData()
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m := req.URL.Path, req.Method; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbodyWithMetrics)))}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, nodes)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\n", req, req.URL)
				return nil, nil
			}
		}),
	}
	fakemetricsClientset := &metricsfake.Clientset{}
	fakemetricsClientset.AddReactor("list", "nodes", func(action core.Action) (handled bool, ret runtime.Object, err error) {
		return true, expectedMetrics, nil
	})
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	cmd := NewCmdTopNode(tf, nil, buf)

	// TODO in the long run, we want to test most of our commands like this. Wire the options struct with specific mocks
	// TODO then check the particular Run functionality and harvest results from fake clients
	cmdOptions := &TopNodeOptions{}
	if err := cmdOptions.Complete(tf, cmd, []string{}, buf); err != nil {
		t.Fatal(err)
	}
	cmdOptions.MetricsClient = fakemetricsClientset
	if err := cmdOptions.Validate(); err != nil {
		t.Fatal(err)
	}
	if err := cmdOptions.RunTopNode(); err != nil {
		t.Fatal(err)
	}

	// Check the presence of node names in the output.
	result := buf.String()
	for _, m := range expectedMetrics.Items {
		if !strings.Contains(result, m.Name) {
			t.Errorf("missing metrics for %s: \n%s", m.Name, result)
		}
	}
}

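// TestTopNodeWithNameMetricsFromMetricsServer verifies that a single node's
// metrics are fetched through the fake metrics-server client when a node name
// is given, and that other nodes do not appear in the output.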
func TestTopNodeWithNameMetricsFromMetricsServer(t *testing.T) {
	initTestErrorHandler(t)
	metrics, nodes := testNodeV1beta1MetricsData()
	expectedMetrics := metrics.Items[0]
	expectedNode := nodes.Items[0]
	nonExpectedMetrics := metricsv1beta1api.NodeMetricsList{
		ListMeta: metrics.ListMeta,
		Items:    metrics.Items[1:],
	}
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes/%s", apiPrefix, apiVersion, expectedMetrics.Name)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m := req.URL.Path, req.Method; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbodyWithMetrics)))}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, &expectedNode)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\n", req, req.URL)
				return nil, nil
			}
		}),
	}
	fakemetricsClientset := &metricsfake.Clientset{}
	fakemetricsClientset.AddReactor("get", "nodes", func(action core.Action) (handled bool, ret runtime.Object, err error) {
		return true, &expectedMetrics, nil
	})
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	cmd := NewCmdTopNode(tf, nil, buf)

	// TODO in the long run, we want to test most of our commands like this. Wire the options struct with specific mocks
	// TODO then check the particular Run functionality and harvest results from fake clients
	cmdOptions := &TopNodeOptions{}
	if err := cmdOptions.Complete(tf, cmd, []string{expectedMetrics.Name}, buf); err != nil {
		t.Fatal(err)
	}
	cmdOptions.MetricsClient = fakemetricsClientset
	if err := cmdOptions.Validate(); err != nil {
		t.Fatal(err)
	}
	if err := cmdOptions.RunTopNode(); err != nil {
		t.Fatal(err)
	}

	// Check the presence of node names in the output.
	result := buf.String()
	if !strings.Contains(result, expectedMetrics.Name) {
		t.Errorf("missing metrics for %s: \n%s", expectedMetrics.Name, result)
	}
	for _, m := range nonExpectedMetrics.Items {
		if strings.Contains(result, m.Name) {
			t.Errorf("unexpected metrics for %s: \n%s", m.Name, result)
		}
	}
}

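// TestTopNodeWithLabelSelectorMetricsFromMetricsServer verifies that
// --selector restricts the metrics-server results to matching nodes only.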
func TestTopNodeWithLabelSelectorMetricsFromMetricsServer(t *testing.T) {
	initTestErrorHandler(t)
	metrics, nodes := testNodeV1beta1MetricsData()
	expectedMetrics := &metricsv1beta1api.NodeMetricsList{
		ListMeta: metrics.ListMeta,
		Items:    metrics.Items[0:1],
	}
	expectedNodes := v1.NodeList{
		ListMeta: nodes.ListMeta,
		Items:    nodes.Items[0:1],
	}
	nonExpectedMetrics := &metricsv1beta1api.NodeMetricsList{
		ListMeta: metrics.ListMeta,
		Items:    metrics.Items[1:],
	}
	label := "key=value"
	expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)

	tf := cmdtesting.NewTestFactory()
	codec := legacyscheme.Codecs.LegacyCodec(scheme.Versions...)
	ns := legacyscheme.Codecs

	tf.Client = &fake.RESTClient{
		NegotiatedSerializer: ns,
		Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
			switch p, m, _ := req.URL.Path, req.Method, req.URL.RawQuery; {
			case p == "/api":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
			case p == "/apis":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbodyWithMetrics)))}, nil
			case p == expectedNodePath && m == "GET":
				return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, &expectedNodes)}, nil
			default:
				t.Fatalf("unexpected request: %#v\nGot URL: %#v\n", req, req.URL)
				return nil, nil
			}
		}),
	}

	fakemetricsClientset := &metricsfake.Clientset{}
	fakemetricsClientset.AddReactor("list", "nodes", func(action core.Action) (handled bool, ret runtime.Object, err error) {
		return true, expectedMetrics, nil
	})
	tf.Namespace = "test"
	tf.ClientConfigVal = defaultClientConfig()
	buf := bytes.NewBuffer([]byte{})

	cmd := NewCmdTopNode(tf, nil, buf)
	cmd.Flags().Set("selector", label)

	// TODO in the long run, we want to test most of our commands like this. Wire the options struct with specific mocks
	// TODO then check the particular Run functionality and harvest results from fake clients
	cmdOptions := &TopNodeOptions{}
	if err := cmdOptions.Complete(tf, cmd, []string{}, buf); err != nil {
		t.Fatal(err)
	}
	cmdOptions.MetricsClient = fakemetricsClientset
	if err := cmdOptions.Validate(); err != nil {
		t.Fatal(err)
	}
	if err := cmdOptions.RunTopNode(); err != nil {
		t.Fatal(err)
	}

	// Check the presence of node names in the output.
	result := buf.String()
	for _, m := range expectedMetrics.Items {
		if !strings.Contains(result, m.Name) {
			t.Errorf("missing metrics for %s: \n%s", m.Name, result)
		}
	}
	for _, m := range nonExpectedMetrics.Items {
		if strings.Contains(result, m.Name) {
			t.Errorf("unexpected metrics for %s: \n%s", m.Name, result)
		}
	}
}