mirror of https://github.com/k3s-io/k3s

Update elasticsearch and kibana images
parent ead65fc25f
commit 5b79b65bd3
Elasticsearch image Dockerfile:

@@ -16,28 +16,37 @@
 # to work with Kubernetes logging. Inspired by the Dockerfile
 # dockerfile/elasticsearch
 
-FROM java:openjdk-7-jre
-MAINTAINER Satnam Singh "satnam@google.com"
+FROM java:openjdk-8-jre
+
+MAINTAINER Mik Vyatskov "vmik@google.com"
 
 ENV DEBIAN_FRONTEND noninteractive
+ENV ELASTICSEARCH_VERSION 2.4.1
 
-RUN apt-get update && \
-    apt-get install -y curl && \
-    apt-get clean
+RUN apt-get update \
+    && apt-get install -y curl \
+    && apt-get clean
 
-RUN cd / && \
-    curl -O https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.5.2.tar.gz && \
-    tar xf elasticsearch-1.5.2.tar.gz && \
-    rm elasticsearch-1.5.2.tar.gz
+RUN set -x \
+    && cd / \
+    && mkdir /elasticsearch \
+    && curl -O https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/$ELASTICSEARCH_VERSION/elasticsearch-$ELASTICSEARCH_VERSION.tar.gz \
+    && tar xf elasticsearch-$ELASTICSEARCH_VERSION.tar.gz -C /elasticsearch --strip-components=1 \
+    && rm elasticsearch-$ELASTICSEARCH_VERSION.tar.gz
 
-RUN mkdir -p /elasticsearch-1.5.2/config/templates
+RUN mkdir -p /elasticsearch/config/templates
+COPY template-k8s-logstash.json /elasticsearch/config/templates/template-k8s-logstash.json
 
-COPY elasticsearch.yml /elasticsearch-1.5.2/config/elasticsearch.yml
-COPY template-k8s-logstash.json /elasticsearch-1.5.2/config/templates/template-k8s-logstash.json
+COPY config /elasticsearch/config
+
 COPY run.sh /
 COPY elasticsearch_logging_discovery /
 
+RUN useradd --no-create-home --user-group elasticsearch \
+    && mkdir /data \
+    && chown -R elasticsearch:elasticsearch /elasticsearch /elasticsearch_logging_discovery /run.sh /data
+
+VOLUME ["/data"]
 EXPOSE 9200 9300
 
-CMD ["/run.sh"]
+CMD ["/bin/su", "-c", "/run.sh", "elasticsearch"]
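For orientation (not part of the commit): a minimal sketch of smoke-testing the rebuilt image, assuming the build context contains the files the Dockerfile references (run.sh, elasticsearch_logging_discovery, template-k8s-logstash.json, config/) and using a hypothetical local tag.

    # Illustrative only (hypothetical tag; build context assumed complete)
    docker build -t elasticsearch-local:v2.4.1 .
    # check the unversioned install path and the non-root user the new CMD switches to
    docker run --rm elasticsearch-local:v2.4.1 /bin/sh -c 'ls /elasticsearch/bin && id elasticsearch'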
Elasticsearch image Makefile:

@@ -16,7 +16,7 @@
 
 # The current value of the tag to be used for building and
 # pushing an image to gcr.io
-TAG = 1.9
+TAG = v2.4.1
 
 build: elasticsearch_logging_discovery
 	docker build -t gcr.io/google_containers/elasticsearch:$(TAG) .

@@ -25,7 +25,7 @@ push:
 	gcloud docker push gcr.io/google_containers/elasticsearch:$(TAG)
 
 elasticsearch_logging_discovery:
-	go build elasticsearch_logging_discovery.go
+	go build -a -ldflags "-w" elasticsearch_logging_discovery.go
 
 clean:
 	rm elasticsearch_logging_discovery
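For orientation (not part of the commit): the image is normally produced through the Makefile targets above; a hedged usage sketch, assuming a local Go toolchain and gcloud credentials with push access.

    # Illustrative only
    make build   # go-builds elasticsearch_logging_discovery, then docker-builds the image with TAG=v2.4.1
    make push    # gcloud docker push gcr.io/google_containers/elasticsearch:v2.4.1
    make clean   # removes the locally built discovery binary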
Elasticsearch config (elasticsearch.yml):

@@ -1,8 +1,14 @@
 cluster.name: kubernetes-logging
 
 node.master: ${NODE_MASTER}
 node.data: ${NODE_DATA}
 
 transport.tcp.port: ${TRANSPORT_PORT}
 http.port: ${HTTP_PORT}
-discovery.zen.ping.multicast.enabled: false
-discovery.zen.minimum_master_nodes: 2
+
+path.data: /data
+
+network.host: 0.0.0.0
+
+discovery.zen.minimum_master_nodes: ${MINIMUM_MASTER_NODES}
+discovery.zen.ping.multicast.enabled: false
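The ${...} placeholders above are left for startup time: Elasticsearch expands environment-variable references in elasticsearch.yml, and run.sh (further down in this diff) exports a default for each of them. A hedged sketch of overriding them when running the image by hand rather than through the bundled manifests; the tag and values are illustrative.

    # Illustrative only: data-only node with an explicit quorum setting
    docker run -d \
      -e NODE_MASTER=false -e NODE_DATA=true \
      -e MINIMUM_MASTER_NODES=2 \
      -p 9200:9200 -p 9300:9300 \
      gcr.io/google_containers/elasticsearch:v2.4.1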
Elasticsearch logging config (logging.yml, new file):

@@ -0,0 +1,15 @@
+# you can override this using by setting a system property, for example -Des.logger.level=DEBUG
+es.logger.level: INFO
+rootLogger: ${es.logger.level}, console
+logger:
+  # log action execution errors for easier debugging
+  action: DEBUG
+  # reduce the logging for aws, too much is logged under the default INFO
+  com.amazonaws: WARN
+
+appender:
+  console:
+    type: console
+    layout:
+      type: consolePattern
+      conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
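As the file's own first comment notes, the root log level can be raised without editing the file by passing a system property, which Elasticsearch 2.x accepts on the bin/elasticsearch command line. Hedged example, run from a shell inside the container:

    # Illustrative only: one-off debug run inside the container
    /elasticsearch/bin/elasticsearch -Des.logger.level=DEBUG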
Elasticsearch image run.sh:

@@ -16,7 +16,10 @@
 
 export NODE_MASTER=${NODE_MASTER:-true}
 export NODE_DATA=${NODE_DATA:-true}
-/elasticsearch_logging_discovery >> /elasticsearch-1.5.2/config/elasticsearch.yml
 export HTTP_PORT=${HTTP_PORT:-9200}
 export TRANSPORT_PORT=${TRANSPORT_PORT:-9300}
-/elasticsearch-1.5.2/bin/elasticsearch
+export MINIMUM_MASTER_NODES=${MINIMUM_MASTER_NODES:-2}
+
+/elasticsearch_logging_discovery >> /elasticsearch/config/elasticsearch.yml
+
+/elasticsearch/bin/elasticsearch
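After this change, run.sh first exports a default for every placeholder used by elasticsearch.yml, then appends whatever elasticsearch_logging_discovery prints (presumably the peer list discovered through the Kubernetes API) to the config, and only then starts Elasticsearch from the unversioned /elasticsearch path. A hedged way to inspect the rendered config and cluster state in a running pod; pod name and namespace are placeholders.

    # Illustrative only
    kubectl exec -n kube-system <elasticsearch-logging-pod> -- cat /elasticsearch/config/elasticsearch.yml
    kubectl exec -n kube-system <elasticsearch-logging-pod> -- curl -s localhost:9200/_cluster/health?pretty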
Kibana image Dockerfile:

@@ -15,21 +15,26 @@
 # A Dockerfile for creating a Kibana container that is designed
 # to work with Kubernetes logging.
 
-FROM java:openjdk-7-jre
-MAINTAINER Satnam Singh "satnam@google.com"
+FROM gcr.io/google_containers/ubuntu-slim:0.4
+
+MAINTAINER Mik Vyatskov "vmik@google.com"
 
 ENV DEBIAN_FRONTEND noninteractive
+ENV KIBANA_VERSION 4.6.1
 
-RUN apt-get update && \
-    apt-get install -y curl && \
-    apt-get clean
+RUN apt-get update \
+    && apt-get install -y curl \
+    && apt-get clean
 
-RUN cd / && \
-    curl -O https://download.elastic.co/kibana/kibana/kibana-4.0.2-linux-x64.tar.gz && \
-    tar xf kibana-4.0.2-linux-x64.tar.gz && \
-    rm kibana-4.0.2-linux-x64.tar.gz
+RUN set -x \
+    && cd / \
+    && mkdir /kibana \
+    && curl -O https://download.elastic.co/kibana/kibana/kibana-$KIBANA_VERSION-linux-x86_64.tar.gz \
+    && tar xf kibana-$KIBANA_VERSION-linux-x86_64.tar.gz -C /kibana --strip-components=1 \
+    && rm kibana-$KIBANA_VERSION-linux-x86_64.tar.gz
 
 COPY run.sh /run.sh
 
 EXPOSE 5601
 
 CMD ["/run.sh"]
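Note the base-image switch: Kibana 4.x ships with its own Node.js runtime, so the JRE base is no longer needed and a slim Ubuntu base is enough. A minimal build-and-inspect sketch, with a hypothetical local tag.

    # Illustrative only (hypothetical tag)
    docker build -t kibana-local:v4.6.1 .
    docker run --rm kibana-local:v4.6.1 /bin/sh -c 'ls /kibana/bin'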
Kibana image Makefile:

@@ -14,7 +14,7 @@
 
 .PHONY: build push
 
-TAG = 1.3
+TAG = v4.6.1
 
 build:
 	docker build -t gcr.io/google_containers/kibana:$(TAG) .
Kibana image run.sh:

@@ -16,4 +16,5 @@
 
 export ELASTICSEARCH_URL=${ELASTICSEARCH_URL:-"http://localhost:9200"}
 echo ELASTICSEARCH_URL=${ELASTICSEARCH_URL}
-/kibana-4.0.2-linux-x64/bin/kibana -e ${ELASTICSEARCH_URL}
+
+/kibana/bin/kibana -e ${ELASTICSEARCH_URL}
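With the versioned install path gone, the script only depends on ELASTICSEARCH_URL. A hedged run sketch pointing Kibana at an Elasticsearch endpoint; hostname and tag are placeholders.

    # Illustrative only
    docker run -d -p 5601:5601 \
      -e ELASTICSEARCH_URL=http://elasticsearch-logging:9200 \
      gcr.io/google_containers/kibana:v4.6.1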