mirror of https://github.com/k3s-io/k3s
commit 3a5768a0ad

@@ -4,7 +4,7 @@ all: push
 # this tag and reset to v1. You should also double check the native
 # Hadoop libs at that point (we grab the 2.6.1 libs, which are
 # appropriate for 1.5.1-with-2.6).
-TAG = 1.5.1_v1
+TAG = 1.5.1_v2
 
 containers:
 	docker build -t gcr.io/google_containers/spark-base base
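For orientation, the TAG bump above is what versions the example images. The rest of the Makefile is not shown here, but the equivalent manual flow for one image would look roughly like the sketch below (the tag/push steps and gcr.io credentials are assumptions, not part of this diff):

# Build the shared base image (same command as the containers: target above),
# then tag and push it under the new version string. Pushing to
# gcr.io/google_containers assumes you have push access to that registry project.
docker build -t gcr.io/google_containers/spark-base base
docker tag gcr.io/google_containers/spark-base gcr.io/google_containers/spark-base:1.5.1_v2
docker push gcr.io/google_containers/spark-base:1.5.1_v2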
@@ -1,8 +1,5 @@
 FROM java:latest
 
-RUN apt-get update -y
-RUN apt-get install -y scala
-
 ENV hadoop_ver 2.6.1
 ENV spark_ver 1.5.1

@@ -30,7 +27,6 @@ RUN mkdir -p /opt && \
 RUN wget -O /opt/spark/lib/gcs-connector-latest-hadoop2.jar https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-latest-hadoop2.jar
 
 ADD log4j.properties /opt/spark/conf/log4j.properties
-ADD setup_client.sh /
 ADD start-common.sh /
 ADD core-site.xml /opt/spark/conf/core-site.xml
 ADD spark-defaults.conf /opt/spark/conf/spark-defaults.conf

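The base image drops the distro Scala package but keeps the Spark distribution itself, which bundles the Scala runtime it needs. A quick smoke test of a locally built image might be the following (the image name comes from the Makefile above; the check itself is just a suggestion):

# Print the Spark and Scala versions baked into the image; this should still
# work without the apt-get installed scala the old Dockerfile pulled in.
docker run --rm gcr.io/google_containers/spark-base /opt/spark/bin/spark-submit --version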
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-# Copyright 2015 The Kubernetes Authors All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [[ $# != 2 || $1 == "" || $2 == "" ]]; then
-  echo "Usage: . ./setup_client.sh master_address master_port"
-  exit 1
-fi
-
-echo "$1 spark-master" >> /etc/hosts
-export SPARK_LOCAL_HOSTNAME=$(hostname -i)
-export MASTER=spark://spark-master:$2

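With setup_client.sh deleted here (and its ADD removed from the base Dockerfile above), a client container that wants the same wiring has to do it by hand. The equivalent of the removed helper is just the following, with the master address and port as placeholders:

# Manual stand-in for the deleted setup_client.sh, run inside a client container.
echo "10.0.0.1 spark-master" >> /etc/hosts       # placeholder master address
export SPARK_LOCAL_HOSTNAME=$(hostname -i)
export MASTER=spark://spark-master:7077          # placeholder master port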
@@ -17,8 +17,6 @@
 . /start-common.sh
 
 echo "$(hostname -i) spark-master" >> /etc/hosts
 export SPARK_LOCAL_HOSTNAME=spark-master
-export SPARK_MASTER_IP=spark-master
-
-/opt/spark/sbin/start-master.sh
-tail -F /opt/spark/logs/*
+# Run spark-class directly so that when it exits (or crashes), the pod restarts.
+/opt/spark/bin/spark-class org.apache.spark.deploy.master.Master --ip spark-master --port 7077 --webui-port 8080

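The practical difference in this start script: previously start-master.sh daemonized the Master and tail kept the container alive, so a crashed Master JVM could leave the pod looking healthy; now spark-class runs the Master as the container's foreground process, so a crash ends the container and the kubelet restarts it. A rough way to observe that, with the pod name and label selector as assumptions (they come from the controller manifests, not this hunk):

# Master output should now be visible as container logs, and a crashed Master
# shows up as a restart rather than a silent hang.
kubectl logs spark-master-controller-xxxxx        # placeholder pod name
kubectl get pods -l component=spark-master        # assumed label; watch the RESTARTS column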
@@ -2,6 +2,6 @@ FROM gcr.io/google_containers/spark-base:latest
 
 ADD start.sh /
 ADD log4j.properties /opt/spark/conf/log4j.properties
-EXPOSE 8080
+EXPOSE 8081
 
 ENTRYPOINT ["/start.sh"]

@@ -16,6 +16,5 @@
 
 . /start-common.sh
 
-/opt/spark/sbin/start-slave.sh spark://spark-master:7077
-
-tail -F /opt/spark/logs/*
+# Run spark-class directly so that when it exits (or crashes), the pod restarts.
+/opt/spark/bin/spark-class org.apache.spark.deploy.worker.Worker spark://spark-master:7077 --webui-port 8081

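Same pattern for the worker: it now runs in the foreground and serves its web UI on 8081, matching the EXPOSE change in the worker Dockerfile above. One way to check that a (re)started worker has registered with the master, with the pod name as a placeholder:

# Forward the master UI locally and confirm the worker shows up under "Workers".
kubectl port-forward spark-master-controller-xxxxx 8080:8080   # placeholder pod name
# then browse http://localhost:8080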
@@ -13,7 +13,7 @@ spec:
     spec:
       containers:
       - name: spark-driver
-        image: gcr.io/google_containers/spark-driver:1.5.1_v1
+        image: gcr.io/google_containers/spark-driver:1.5.1_v2
         resources:
           requests:
             cpu: 100m

@@ -15,7 +15,7 @@ spec:
     spec:
      containers:
       - name: spark-master
-        image: gcr.io/google_containers/spark-master:1.5.1_v1
+        image: gcr.io/google_containers/spark-master:1.5.1_v2
         ports:
         - containerPort: 7077
         volumeMounts:

@@ -16,7 +16,7 @@ spec:
     spec:
       containers:
       - name: spark-worker
-        image: gcr.io/google_containers/spark-worker:1.5.1_v1
+        image: gcr.io/google_containers/spark-worker:1.5.1_v2
         ports:
         - containerPort: 8888
         volumeMounts:

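The three controller manifests above only change the image tag. An existing cluster running the v1 images needs its controllers recreated (or at least their pods deleted) before anything pulls :1.5.1_v2; a minimal sketch, with the manifest file names as placeholders since they are not named in this diff:

# Recreate the controllers from the updated manifests so new pods use :1.5.1_v2.
kubectl delete -f spark-master-controller.yaml    # placeholder file names
kubectl create -f spark-master-controller.yaml
kubectl delete -f spark-worker-controller.yaml
kubectl create -f spark-worker-controller.yaml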
@@ -13,19 +13,10 @@ spec:
     spec:
       containers:
       - name: spark-master
-        image: gcr.io/google_containers/spark-master:1.5.1_v1
+        image: gcr.io/google_containers/spark-master:1.5.1_v2
         ports:
         - containerPort: 7077
         - containerPort: 8080
-        livenessProbe:
-          exec:
-            command:
-            - /opt/spark/sbin/spark-daemon.sh
-            - status
-            - org.apache.spark.deploy.master.Master
-            - '1'
-          initialDelaySeconds: 30
-          timeoutSeconds: 1
         resources:
           requests:
             cpu: 100m

@@ -13,18 +13,9 @@ spec:
     spec:
       containers:
       - name: spark-worker
-        image: gcr.io/google_containers/spark-worker:1.5.1_v1
+        image: gcr.io/google_containers/spark-worker:1.5.1_v2
         ports:
-        - containerPort: 8888
-        livenessProbe:
-          exec:
-            command:
-            - /opt/spark/sbin/spark-daemon.sh
-            - status
-            - org.apache.spark.deploy.worker.Worker
-            - '1'
-          initialDelaySeconds: 30
-          timeoutSeconds: 1
+        - containerPort: 8081
         resources:
           requests:
             cpu: 100m

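Dropping the exec livenessProbe here (and in the master controller above) is consistent with the start-script changes earlier in this commit: spark-daemon.sh status checks the PID file that start-master.sh / start-slave.sh used to write, so once the Master and Worker are launched directly through spark-class there is no PID file and the probe would never report the process as running. Crash detection now relies on the foreground process exiting. For reference, the removed worker probe was effectively running:

# What the deleted probe executed inside the worker container; it looks for a
# PID file (by default under /tmp) that only the sbin start scripts create,
# which no longer exists when spark-class is the foreground process.
/opt/spark/sbin/spark-daemon.sh status org.apache.spark.deploy.worker.Worker 1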