Mirror of https://github.com/Azure/ARO-RP.git
Merge pull request #3664 from Azure/s-fairchild/ARO-8852-hive-deploy-script
Update hive-generate-config.sh to accept differing versions
Commit b9400c448e

docs/hive.md | 42
docs/hive.md
@@ -2,7 +2,13 @@

 ## Version

-Update the HIVE_IMAGE_COMMIT_HASH in `hack/hive-generate-config.sh` with the latest commit sha of the Hive image you are deploying. The commit sha is used to specify the image tag and also used during config generation to checkout the correct version of the config files. The config files are subsequently used by the `hack/hive-dev-install.sh` script during iunstallation or during config updates.
+The commit sha is used to specify the image tag and also used during config generation to checkout the correct version of the config files. The config files are subsequently used by the `hack/hive-dev-install.sh` script during installation or during config updates.
+
+1. You can either
+    1. Provide the hive image commit hash as an argument to `hack/hive-generate-config.sh`. This is useful for testing new hive images before hive releases.
+        1. Example: `./hack/hive-generate-config.sh d7ead609f4`
+    2. Accept the default version by providing no arguments, which should be the latest.
+        1. Example: `./hack/hive-generate-config.sh`

 ## Generating config
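As a condensed illustration of the paragraph above, the commit sha ends up in exactly two places; the variable names are taken from the rewritten generation script further down in this diff:

```bash
# the sha selects the Hive image tag to deploy...
hive_image="quay.io/app-sre/hive:${hive_image_commit_hash}"
# ...and pins the cloned openshift/hive checkout so the generated config matches that image
git reset --hard "$hive_image_commit_hash"
```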
@@ -19,19 +25,21 @@ This will download the latest source, reset to the hash specified in HIVE_IMAGE_

 ## Installing

-Ensure you have the latest AKS kubeconfig:
-```bash
-# get the AKS kubeconfig
-make aks.kubeconfig
-```
-
-Set KUBECONFIG to the aks.kubeconfig file, for example:
-```bash
-export KUBECONFIG="$PWD/aks.kubeconfig"
-```
-
-Installing then simply requires the running of the install script.
-
-```bash
-./hack/hive-dev-install.sh
-```
+1. Connect to the appropriate aks vpn
+    1. vpn-aks-westeurope.ovpn
+    2. vpn-aks-australiaeast.ovpn
+    3. vpn-aks-australiaeast.ovpn
+2. Ensure you have the latest AKS kubeconfig
+    ```bash
+    # get the AKS kubeconfig
+    . ./env
+    make aks.kubeconfig
+    ```
+3. Set KUBECONFIG to the aks.kubeconfig file, for example:
+    ```bash
+    export KUBECONFIG="$PWD/aks.kubeconfig"
+    ```
+4. Installing then simply requires the running of the install script.
+    ```bash
+    ./hack/hive-dev-install.sh
+    ```
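After `./hack/hive-dev-install.sh` completes, a quick sanity check along these lines can confirm the deployment. This is a sketch only: it assumes `kubectl` is on the PATH and uses the `hive` operator namespace set in the generation script below.

```bash
# still pointing at the AKS cluster via aks.kubeconfig
export KUBECONFIG="$PWD/aks.kubeconfig"
kubectl get pods -n hive                     # hive-operator and controller pods should reach Running
kubectl get crds | grep hive.openshift.io    # CRDs copied from hack/hive-config/crds
```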
hack/hive-generate-config.sh
@@ -1,94 +1,143 @@
 #!/bin/bash

-# This is the commit sha that the image was built from and ensures we use the correct configs for the release
-HIVE_IMAGE_COMMIT_HASH="d7ead609f4"
+set -o errexit \
+    -o nounset

-# For now we'll use the quay hive image, but this will change to an ACR once the quay.io -> ACR mirroring is setup
-# Note: semi-scientific way to get the latest image: `podman search --list-tags --limit 10000 quay.io/app-sre/hive | tail -n1`
-HIVE_IMAGE="quay.io/app-sre/hive:${HIVE_IMAGE_COMMIT_HASH}"
+main() {
+    local -r tmpdir="$(mktemp -d)"
+    # shellcheck disable=SC2064
+    trap "cleanup $tmpdir" EXIT

-HIVE_OPERATOR_NS="hive"
+    # This is the commit sha that the image was built from and ensures we use the correct configs for the release
+    local -r default_commit="d7ead609f4"
+    local -r hive_image_commit_hash="${1:-$default_commit}"
+    log "Using hive commit: $hive_image_commit_hash"
+    # shellcheck disable=SC2034
+    local -r hive_operator_namespace="hive"

-# This version is specified in the hive repo and is the only hard dependency for this script
-# https://github.com/openshift/hive/blob/master/vendor/github.com/openshift/build-machinery-go/make/targets/openshift/kustomize.mk#L7
-KUSTOMIZE_VERSION=4.1.3
-KUSTOMIZE=$( which kustomize 2>/dev/null )
-TMPDIR=$( mktemp -d )
+    # For now we'll use the quay hive image, but this will change to an ACR once the quay.io -> ACR mirroring is setup
+    # Note: semi-scientific way to get the latest image: `podman search --list-tags --limit 10000 quay.io/app-sre/hive | tail -n1`
+    # shellcheck disable=SC2034
+    local -r hive_image="quay.io/app-sre/hive:${hive_image_commit_hash}"

-function cleanup {
-    popd >& /dev/null
-    [ -d "$TMPDIR" ] && rm -fr "$TMPDIR"

+    # shellcheck disable=SC2034
+    local kustomize_bin
+    install_kustomize tmpdir \
+        kustomize_bin
+    hive_repo_clone tmpdir
+    hive_repo_hash_checkout tmpdir \
+        hive_image_commit_hash
+    generate_hive_config kustomize_bin \
+        hive_operator_namespace \
+        hive_image \
+        tmpdir
+
+    log "Hive config generated."
 }

-function verify_kustomize {
-    if [ ! -z "$KUSTOMIZE" ]; then
-        return
-    fi
-    echo -n "kustomize not detected, downloading "
-    curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/kustomize/v${KUSTOMIZE_VERSION}/hack/install_kustomize.sh" | bash -s "$KUSTOMIZE_VERSION" "$TMPDIR"
-    if [ $? -ne 0 ]; then
-        echo "error downloading kustomize"
-        exit 1
-    fi
-    KUSTOMIZE="${TMPDIR}/kustomize"
+install_kustomize() {
+    local -n tmpd="$1"
+    local -n kustomize="$2"
+    log "starting"
+
+    if kustomize="$(which kustomize 2> /dev/null)"; then
+        return 0
+    fi
+
+    pushd "$tmpd" 1> /dev/null
+
+    # This version is specified in the hive repo and is the only hard dependency for this script
+    # https://github.com/openshift/hive/blob/master/vendor/github.com/openshift/build-machinery-go/make/targets/openshift/kustomize.mk#L7
+    local -r kustomize_version="4.1.3"
+    log "kustomize not detected, downloading..."
+    if ! curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/kustomize/v${kustomize_version}/hack/install_kustomize.sh" | bash -s "$kustomize_version" "$tmpd" 1> /dev/null; then
+        abort "error downloading kustomize"
+    fi
+
+    if [ ! -d "${HOME}/bin" ]; then
+        mkdir -p "${HOME}/bin"
+    fi
+
+    kustomize_new="${tmpd}/kustomize"
+    kustomize_dest="${HOME}/bin/kustomize"
+    log "Installing $kustomize_new into $kustomize_dest"
+    mv "$kustomize_new" "$kustomize_dest"
+
+    popd 1> /dev/null
+
+    kustomize="$(which kustomize)"
 }

-function hive_repo_clone {
-    echo -n "Cloning hive repo into tmp for config generation"
-    CLONE_ERROR=$(git clone https://github.com/openshift/hive.git "$TMPDIR" 2>/dev/null )
-    if [ $? -ne 0 ]; then
-        echo ": error cloning the hive repo: ${CLONE_ERROR}"
-        exit 1
-    fi
-    echo ", done."
+hive_repo_clone() {
+    local -n tmpd="$1"
+    log "starting"
+
+    local -r repo="https://github.com/openshift/hive.git"
+    log "Cloning $repo into $tmpd for config generation"
+    if ! git clone "$repo" "$tmpd"; then
+        log "error cloning the hive repo"
+        return 1
+    fi
 }

-function hive_repo_hash_checkout {
-    # go into $TMPDIR and checkout the commit the image was built with
-    pushd $TMPDIR >& /dev/null
-    git reset --hard $HIVE_IMAGE_COMMIT_HASH
-    if [ $? -ne 0 ] || [[ $( git rev-parse --short=${#HIVE_IMAGE_COMMIT_HASH} HEAD ) != ${HIVE_IMAGE_COMMIT_HASH} ]]; then
-        echo "error resetting the hive repo to the correct git hash '${HIVE_IMAGE_COMMIT_HASH}'"
-        exit 1
-    fi
+hive_repo_hash_checkout() {
+    local -n tmpd="$1"
+    local -n commit="$2"
+    log "starting"
+    log "Attempting to use commit: $commit"
+
+    pushd "$tmpd" 1> /dev/null
+    git reset --hard "$commit"
+    if [ "$?" -ne 0 ] || [ "$( git rev-parse --short="${#commit}" HEAD )" != "$commit" ]; then
+        abort "error resetting the hive repo to the correct git hash '${commit}'"
+    fi
+
+    popd 1> /dev/null
 }

-function generate_hive_config {
-    # Create the hive operator install config using kustomize
-    mkdir -p overlays/deploy
-    cp overlays/template/kustomization.yaml overlays/deploy
-    pushd overlays/deploy >& /dev/null
-    $KUSTOMIZE edit set image registry.ci.openshift.org/openshift/hive-v4.0:hive=$HIVE_IMAGE
-    $KUSTOMIZE edit set namespace $HIVE_OPERATOR_NS
-    popd >& /dev/null
+generate_hive_config() {
+    local -n kustomize="$1"
+    local -n namespace="$2"
+    local -n image="$3"
+    local -n tmpd="$4"
+    log "starting"
+
+    pushd "$tmpd" 1> /dev/null
+    # Create the hive operator install config using kustomize
+    mkdir -p overlays/deploy
+    cp overlays/template/kustomization.yaml overlays/deploy
+    pushd overlays/deploy >& /dev/null
+    $kustomize edit set image registry.ci.openshift.org/openshift/hive-v4.0:hive="$image"
+    $kustomize edit set namespace "$namespace"
+    popd >& /dev/null

-    $KUSTOMIZE build overlays/deploy > hive-deployment.yaml
+    $kustomize build overlays/deploy > hive-deployment.yaml

-    # return to the repo directory to copy the generated config from $TMPDIR
-    popd >& /dev/null
-    mv "$TMPDIR/hive-deployment.yaml" ./hack/hive-config/
+    # return to the repo directory to copy the generated config from $TMPDIR
+    popd 1> /dev/null
+    mv "$tmpd/hive-deployment.yaml" ./hack/hive-config/

-    if [ -d ./hack/hive-config/crds ]; then
-        rm -fr ./hack/hive-config/crds
-    fi
-    cp -R "$TMPDIR/config/crds" ./hack/hive-config/
+    if [ -d ./hack/hive-config/crds ]; then
+        rm -rf ./hack/hive-config/crds
+    fi
+    cp -R "$tmpd/config/crds" ./hack/hive-config/
 }

-set -e
-trap cleanup EXIT

 if [ ! -f go.mod ] || [ ! -d ".git" ]; then
-    echo "this script must by run from the repo's root directory"
-    exit 1
-fi
-if [[ ! "$TMPDIR" || ! -d "$TMPDIR" ]]; then
-    echo "could not create temp working dir"
-    exit 1
+    echo "this script must by run from the repo's root directory"
+    exit 1
 fi

-hive_repo_clone
-hive_repo_hash_checkout
-verify_kustomize
-generate_hive_config
+declare -r util_script="hack/util.sh"
+if [ -f $util_script ]; then
+    # shellcheck source=util.sh
+    source "$util_script"
+fi

-echo -e "\nHive config generated."
+cleanup() {
+    local tmpd="$1"
+    [ -d "$tmpd" ] && rm -fr "$tmpd"
+}
+
+main "$@"
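One detail worth calling out in the rewritten script: helper functions now receive variable names (for example `install_kustomize tmpdir kustomize_bin`) and bind them with `local -n` namerefs, so a callee can write results back into the caller's variables. A minimal standalone sketch of that pattern, with hypothetical names and assuming bash 4.3+:

```bash
#!/bin/bash
# Illustrative only: mirrors the nameref pattern used by install_kustomize above.

set_output() {
    local -n out="$1"     # 'out' becomes an alias for the caller's variable
    out="resolved-value"  # the assignment lands in the caller's scope
}

main() {
    local result=""
    set_output result     # pass the variable *name*, not its value
    echo "$result"        # prints: resolved-value
}

main "$@"
```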
hack/util.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+if [ "${DEBUG:-false}" == true ]; then
+    set -x
+fi
+
+# log is a wrapper for echo that includes the function name
+# Args
+# 1) msg - string
+# 2) stack_level - int; optional, defaults to calling function
+log() {
+    local -r msg="${1:-"log message is empty"}"
+    local -r stack_level="${2:-1}"
+    echo "${FUNCNAME[${stack_level}]}: ${msg}"
+}
+
+# abort is a wrapper for log that exits with an error code
+abort() {
+    local -ri origin_stacklevel=2
+    log "${1}" "$origin_stacklevel"
+    log "Exiting"
+    exit 1
+}
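For reference, a hypothetical snippet showing how the new `hack/util.sh` helpers behave when sourced; the `deploy` function here is illustrative, not part of the repo:

```bash
#!/bin/bash
source hack/util.sh    # assumes the script is run from the repo root

deploy() {
    log "starting"     # prints "deploy: starting" (FUNCNAME one level up from log)
    [ -f go.mod ] || abort "run me from the repo root"
    # abort prints "deploy: run me from the repo root" (stack level 2),
    # then "abort: Exiting", and exits with status 1
}

deploy
```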