Also push .sha1 for devel builds

This also adds a .sha1 cache file to indicate which file was already pushed
to GCS, and a way to force a re-push if needed, shaving a few seconds off a
kube-up/push if you're just cycling.

With this and #7602, all TAR_URLS will have a .sha1 as well.
pull/6/head
Zach Loafman 2015-05-01 10:50:18 -07:00
parent 230449f122
commit 0c107e4c44
1 changed files with 43 additions and 4 deletions

View File

@ -132,6 +132,41 @@ function detect-project () {
fi fi
} }
function sha1sum-file() {
if which shasum >/dev/null 2>&1; then
shasum -a1 "$1" | awk '{ print $1 }'
else
sha1sum "$1" | awk '{ print $1 }'
fi
}
# Check whether a tar has already been staged: ${file}.sha1 must exist and
# contain exactly the expected checksum.
#
# $1 - path of the local tar
# $2 - expected sha1 of the tar
# Returns 0 if the cached checksum matches, non-zero otherwise.
function already-staged() {
  local -r file=$1
  local -r newsum=$2

  if [[ ! -e "${file}.sha1" ]]; then
    return 1
  fi

  local cached
  cached=$(cat "${file}.sha1")
  [[ "${cached}" == "${newsum}" ]]
}
# Copy a release tar, if we don't already think it's staged in GCS
#
# $1 - staging_path: gs:// directory to copy into
# $2 - gs_url: full gs:// URL of the uploaded tar (ACL is opened on it)
# $3 - tar: local path of the tarball
# $4 - hash: sha1 of the tarball
function copy-if-not-staged() {
  local -r staging_path=$1
  local -r gs_url=$2
  local -r tar=$3
  local -r hash=$4

  if already-staged "${tar}" "${hash}"; then
    echo "+++ $(basename "${tar}") already staged ('rm ${tar}.sha1' to force)"
  else
    # Record the hash PARAMETER. The original wrote ${server_hash}, a
    # caller-scope variable, so the salt tar's cache file recorded the
    # server tar's hash and the staging cache was never valid for it.
    echo "${hash}" > "${tar}.sha1"
    gsutil -m -q -h "Cache-Control:private, max-age=0" cp "${tar}" "${tar}.sha1" "${staging_path}"
    gsutil -m acl ch -g all:R "${gs_url}" "${gs_url}.sha1" >/dev/null 2>&1
  fi
}
# Take the local tar files and upload them to Google Storage. They will then be # Take the local tar files and upload them to Google Storage. They will then be
# downloaded by the master as part of the start up script for the master. # downloaded by the master as part of the start up script for the master.
@ -153,6 +188,7 @@ function upload-server-tars() {
else else
project_hash=$(echo -n "$PROJECT" | md5sum | awk '{ print $1 }') project_hash=$(echo -n "$PROJECT" | md5sum | awk '{ print $1 }')
fi fi
# This requires 1 million projects before the probability of collision is 50% # This requires 1 million projects before the probability of collision is 50%
# that's probably good enough for now :P # that's probably good enough for now :P
project_hash=${project_hash:0:10} project_hash=${project_hash:0:10}
@ -167,13 +203,16 @@ function upload-server-tars() {
local -r staging_path="${staging_bucket}/devel" local -r staging_path="${staging_bucket}/devel"
local server_hash
local salt_hash
server_hash=$(sha1sum-file "${SERVER_BINARY_TAR}")
salt_hash=$(sha1sum-file "${SALT_TAR}")
echo "+++ Staging server tars to Google Storage: ${staging_path}" echo "+++ Staging server tars to Google Storage: ${staging_path}"
local server_binary_gs_url="${staging_path}/${SERVER_BINARY_TAR##*/}" local server_binary_gs_url="${staging_path}/${SERVER_BINARY_TAR##*/}"
gsutil -q -h "Cache-Control:private, max-age=0" cp "${SERVER_BINARY_TAR}" "${server_binary_gs_url}"
gsutil acl ch -g all:R "${server_binary_gs_url}" >/dev/null 2>&1
local salt_gs_url="${staging_path}/${SALT_TAR##*/}" local salt_gs_url="${staging_path}/${SALT_TAR##*/}"
gsutil -q -h "Cache-Control:private, max-age=0" cp "${SALT_TAR}" "${salt_gs_url}" copy-if-not-staged "${staging_path}" "${server_binary_gs_url}" "${SERVER_BINARY_TAR}" "${server_hash}"
gsutil acl ch -g all:R "${salt_gs_url}" >/dev/null 2>&1 copy-if-not-staged "${staging_path}" "${salt_gs_url}" "${SALT_TAR}" "${salt_hash}"
# Convert from gs:// URL to an https:// URL # Convert from gs:// URL to an https:// URL
SERVER_BINARY_TAR_URL="${server_binary_gs_url/gs:\/\//https://storage.googleapis.com/}" SERVER_BINARY_TAR_URL="${server_binary_gs_url/gs:\/\//https://storage.googleapis.com/}"