mirror of https://github.com/kubernetes/kops.git
Merge pull request #3490 from justinsb/download_with_wget
Automatic merge from submit-queue. Support wget for download, not just curl
This commit is contained in:
commit 1c81ec5e42
@@ -66,9 +66,23 @@ download-or-bust() {
   for url in "${urls[@]}"; do
     local file="${url##*/}"
     rm -f "${file}"
-    if ! curl -f --ipv4 -Lo "${file}" --connect-timeout 20 --retry 6 --retry-delay 10 "${url}"; then
-      echo "== Failed to download ${url}. Retrying. =="
-    elif [[ -n "${hash}" ]] && ! validate-hash "${file}" "${hash}"; then
+
+    if [[ $(which curl) ]]; then
+      if ! curl -f --ipv4 -Lo "${file}" --connect-timeout 20 --retry 6 --retry-delay 10 "${url}"; then
+        echo "== Failed to curl ${url}. Retrying. =="
+        break
+      fi
+    elif [[ $(which wget ) ]]; then
+      if ! wget --inet4-only -O "${file}" --connect-timeout=20 --tries=6 --wait=10 "${url}"; then
+        echo "== Failed to wget ${url}. Retrying. =="
+        break
+      fi
+    else
+      echo "== Could not find curl or wget. Retrying. =="
+      break
+    fi
+
+    if [[ -n "${hash}" ]] && ! validate-hash "${file}" "${hash}"; then
       echo "== Hash validation of ${url} failed. Retrying. =="
     else
       if [[ -n "${hash}" ]]; then
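Read on its own, the new logic amounts to: prefer curl, fall back to wget, and bail out if neither tool exists, then hash-check the result. Below is a minimal standalone sketch of that pattern, not the script itself: the function name try-download and the sha256sum-based validate-hash stub are illustrative assumptions, and the real script's enclosing retry loop and break handling are omitted.

#!/bin/bash
# Minimal standalone sketch of the curl-or-wget fallback in the hunk above.
# NOTE: try-download and this validate-hash stub are hypothetical names for
# illustration; the real script defines its own validate-hash and wraps the
# whole thing in a retry loop over mirror URLs.

validate-hash() {
  # Stub: compare a file's sha256 digest against an expected value.
  local file="$1" expected="$2"
  local actual
  actual=$(sha256sum "${file}" | awk '{print $1}')
  [[ "${actual}" == "${expected}" ]]
}

try-download() {
  local url="$1" hash="$2"
  local file="${url##*/}"   # save under the URL's basename, as in the script
  rm -f "${file}"

  # Prefer curl; fall back to wget; fail cleanly if neither is installed.
  if [[ $(which curl) ]]; then
    curl -f --ipv4 -Lo "${file}" --connect-timeout 20 --retry 6 --retry-delay 10 "${url}" || return 1
  elif [[ $(which wget) ]]; then
    wget --inet4-only -O "${file}" --connect-timeout=20 --tries=6 --wait=10 "${url}" || return 1
  else
    echo "== Could not find curl or wget. ==" >&2
    return 1
  fi

  # Optional integrity check, mirroring the hash handling in the diff.
  if [[ -n "${hash}" ]] && ! validate-hash "${file}" "${hash}"; then
    echo "== Hash validation of ${url} failed. ==" >&2
    return 1
  fi
}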
The same change is applied verbatim elsewhere in the commit; only the hunk headers differ (the first recurs six times):

@@ -69,9 +69,23 @@ download-or-bust() {
@@ -49,9 +49,23 @@ Resources.AWSAutoScalingLaunchConfigurationmasterustest1amastersminimalexampleco
@@ -303,9 +317,23 @@ Resources.AWSAutoScalingLaunchConfigurationnodesminimalexamplecom.Properties.Use
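A hypothetical invocation of the try-download sketch above; with an empty hash argument the validation step is skipped:

try-download "https://dl.k8s.io/release/stable.txt" "" && cat stable.txt

One detail worth noting in the change itself: [[ $(which curl) ]] is truthy because which prints the binary's path (a non-empty string) when the tool is on the PATH and prints nothing otherwise; command -v curl would be the POSIX-portable spelling of the same probe.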