From 6a921fb909ba1e101e74406f30490d8d04e88b64 Mon Sep 17 00:00:00 2001 From: Shuhuan Yan <71528407+syan-tibco@users.noreply.github.com> Date: Wed, 29 Jan 2025 21:50:59 -0600 Subject: [PATCH] [PCP-9347] externalize/enhance TP setup automation (#50) --- charts/provisioner-config-local/Chart.yaml | 2 +- .../recipes/pp-deploy-cp-core-on-prem.yaml | 2 +- .../recipes/tp-automation-o11y.yaml | 55 +++- .../recipes/tp-base-on-prem-https.yaml | 2 +- dev/platform-provisioner-test.sh | 3 +- dev/platform-provisioner.sh | 3 +- docs/recipes/automation/on-perm/README.md | 51 ++-- .../automation/on-perm/adjust-recipe.sh | 60 ++++- .../automation/on-perm/generate-recipe.sh | 21 +- docs/recipes/automation/on-perm/run.sh | 237 ++++++++++-------- ...date-tokens.sh => update-recipe-tokens.sh} | 45 +++- .../automation/tp-setup/bootstrap/env.py | 14 +- .../automation/tp-setup/bootstrap/helper.py | 44 ++-- .../tp-setup/bootstrap/page_auth.py | 6 +- .../automation/tp-setup/bootstrap/page_env.py | 4 + .../automation/tp-setup/bootstrap/report.py | 171 +++++++++++++ .../automation/tp-setup/bootstrap/run.py | 169 +++++++++---- .../automation/tp-setup/bootstrap/util.py | 115 +++++++-- 18 files changed, 752 insertions(+), 252 deletions(-) rename docs/recipes/automation/on-perm/{update-tokens.sh => update-recipe-tokens.sh} (66%) create mode 100644 docs/recipes/automation/tp-setup/bootstrap/page_env.py create mode 100644 docs/recipes/automation/tp-setup/bootstrap/report.py diff --git a/charts/provisioner-config-local/Chart.yaml b/charts/provisioner-config-local/Chart.yaml index d196e32..a48cda0 100644 --- a/charts/provisioner-config-local/Chart.yaml +++ b/charts/provisioner-config-local/Chart.yaml @@ -8,7 +8,7 @@ apiVersion: v2 name: provisioner-config-local description: Platform Provisioner local config type: application -version: "1.1.16" +version: "1.1.19" appVersion: "2.0.0" home: https://github.com/TIBCOSoftware/tp-helm-charts maintainers: diff --git a/charts/provisioner-config-local/recipes/pp-deploy-cp-core-on-prem.yaml b/charts/provisioner-config-local/recipes/pp-deploy-cp-core-on-prem.yaml index b873bd5..249d675 100644 --- a/charts/provisioner-config-local/recipes/pp-deploy-cp-core-on-prem.yaml +++ b/charts/provisioner-config-local/recipes/pp-deploy-cp-core-on-prem.yaml @@ -26,7 +26,7 @@ meta: # CP version see: https://docs.tibco.com/pub/platform-cp/1.3.0/doc/html/Default.htm#Installation/helm-chart-version-matrix.htm GUI_CP_PLATFORM_BOOTSTRAP_VERSION: 1.3.53 - GUI_CP_PLATFORM_BASE_VERSION: 1.3.437 + GUI_CP_PLATFORM_BASE_VERSION: 1.3.439 # HF4 # CP env GUI_CP_INSTANCE_ID: "cp1" diff --git a/charts/provisioner-config-local/recipes/tp-automation-o11y.yaml b/charts/provisioner-config-local/recipes/tp-automation-o11y.yaml index d543cee..83e162b 100644 --- a/charts/provisioner-config-local/recipes/tp-automation-o11y.yaml +++ b/charts/provisioner-config-local/recipes/tp-automation-o11y.yaml @@ -18,10 +18,10 @@ meta: GUI_TP_AUTO_CP_VERSION: "" # keep it empty, unless you see an error with GUI_TP_AUTO_CP_VERSION in the logs GUI_TP_AUTO_USE_LOCAL_SCRIPT: false GUI_TP_AUTO_USE_GITHUB_SCRIPT: true - GUI_TP_AUTO_SCREENSHOT_PATH: "/tmp/auto/screenshots" + GUI_TP_AUTO_REPORT_PATH: "/tmp/auto/report" GUI_TP_AUTO_GITHUB_REPO_NAME: "github.com/TIBCOSoftware/platform-provisioner" GUI_TP_AUTO_GITHUB_REPO_PATH: "docs/recipes/automation/tp-setup/bootstrap" - GUI_TP_AUTO_GITHUB_REPO_BRANCH: "provisioner-config-local-1.1.16" + GUI_TP_AUTO_GITHUB_REPO_BRANCH: "provisioner-config-local-{{ .Chart.Version }}" # DP capabilities deployment 
GUI_TP_AUTO_ACTIVE_USER: true @@ -56,6 +56,7 @@ meta: GUI_TP_AUTO_K8S_DP_NAMESPACE: "k8s-auto-dp1ns" GUI_TP_AUTO_K8S_DP_SERVICE_ACCOUNT: "k8s-auto-dp1sa" GUI_TP_AUTO_INGRESS_CONTROLLER: "nginx" + GUI_TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME: "nginx" GUI_TP_AUTO_STORAGE_CLASS: "hostpath" # DP o11y setup @@ -87,10 +88,11 @@ meta: TP_AUTO_USE_LOCAL_SCRIPT: ${GUI_TP_AUTO_USE_LOCAL_SCRIPT:-false} TP_AUTO_USE_GITHUB_SCRIPT: ${GUI_TP_AUTO_USE_GITHUB_SCRIPT:-true} TP_AUTO_SCRIPT_FOLDER: ${GUI_TP_AUTO_SCRIPT_FOLDER:-"/tmp/auto"} - TP_AUTO_SCREENSHOT_PATH: ${GUI_TP_AUTO_SCREENSHOT_PATH:-"/tmp/auto/screenshots"} + TP_AUTO_REPORT_PATH: ${GUI_TP_AUTO_REPORT_PATH:-"/tmp/auto/report"} + TP_AUTO_REPORT_YAML_FILE: report.yaml TP_AUTO_GITHUB_REPO_NAME: ${GUI_TP_AUTO_GITHUB_REPO_NAME:-"github.com/TIBCOSoftware/platform-provisioner"} TP_AUTO_GITHUB_REPO_PATH: ${GUI_TP_AUTO_GITHUB_REPO_PATH:-"docs/recipes/automation/tp-setup/bootstrap"} - TP_AUTO_GITHUB_REPO_BRANCH: ${GUI_TP_AUTO_GITHUB_REPO_BRANCH:-"provisioner-config-local-1.1.16"} + TP_AUTO_GITHUB_REPO_BRANCH: ${GUI_TP_AUTO_GITHUB_REPO_BRANCH:-"provisioner-config-local-{{ .Chart.Version }}"} # DP capabilities deployment TP_AUTO_ACTIVE_USER: ${GUI_TP_AUTO_ACTIVE_USER:-true} @@ -127,6 +129,7 @@ meta: TP_AUTO_K8S_DP_NAMESPACE: ${GUI_TP_AUTO_K8S_DP_NAMESPACE:-"k8s-auto-dp1ns"} TP_AUTO_K8S_DP_SERVICE_ACCOUNT: ${GUI_TP_AUTO_K8S_DP_SERVICE_ACCOUNT:-"k8s-auto-dp1sa"} TP_AUTO_INGRESS_CONTROLLER: ${GUI_TP_AUTO_INGRESS_CONTROLLER:-"nginx"} + TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME: ${GUI_TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME:-"nginx"} TP_AUTO_STORAGE_CLASS: ${GUI_TP_AUTO_STORAGE_CLASS:-"hostpath"} # DP o11y setup @@ -145,6 +148,7 @@ meta: # flow control PYTHON_FILE_LOGIN_POINT: page_auth.py PYTHON_FILE_ENTRY_POINT: run.py + PYTHON_FILE_ENV_POINT: page_env.py TP_OVERWRITE_DNS: ${GUI_TP_OVERWRITE_DNS:-false} TP_PORT_FORWARD: ${GUI_TP_PORT_FORWARD:-true} tasks: @@ -190,8 +194,6 @@ tasks: content: | cd ${TP_AUTO_SCRIPT_FOLDER} pip install -r requirements.txt - rm -rf downloads - rm -rf screenshots - condition: ${TP_AUTO_ACTIVE_USER} # only create admin and subscription clusters: - name: ${TP_CLUSTER_NAME} @@ -200,6 +202,20 @@ tasks: fileName: script.sh content: | cd ${TP_AUTO_SCRIPT_FOLDER} + _report_file="${TP_AUTO_REPORT_PATH}/${TP_AUTO_REPORT_YAML_FILE}" + + if [[ -f "$_report_file" ]]; then + if [[ "$(yq '.ENV.REPORT_AUTO_ACTIVE_USER' "$_report_file")" == "true" ]]; then + echo "----------------------------------------------------------------" + echo "ENV.REPORT_AUTO_ACTIVE_USER is true. User has been active. Exiting..." + echo "----------------------------------------------------------------" + exit 0 + else + echo "_report_file: ${_report_file}" + cat "$_report_file" + fi + fi + python ${PYTHON_FILE_LOGIN_POINT} _result=$? echo "python return code: ${_result}" @@ -213,6 +229,21 @@ tasks: content: | export TP_AUTO_IS_CREATE_DP=${TP_AUTO_ENABLE_DP} cd ${TP_AUTO_SCRIPT_FOLDER} + _report_file="${TP_AUTO_REPORT_PATH}/${TP_AUTO_REPORT_YAML_FILE}" + if [[ -f "$_report_file" ]]; then + # Get all dataPlane.name and concatenate them into a,b,c format + dp_names=$(yq '.dataPlane[].name' "$_report_file" | tr '\n' ',' | sed 's/,$//') + if [[ -n "$dp_names" ]]; then + echo "----------------------------------------------------------------" + echo "dataPlane[$dp_names] already exists. Exiting..." 
+ echo "----------------------------------------------------------------" + exit 0 + else + echo "_report_file: ${_report_file}" + cat "$_report_file" + fi + fi + python ${PYTHON_FILE_ENTRY_POINT} _result=$? echo "python return code: ${_result}" @@ -282,3 +313,15 @@ tasks: _result=$? echo "python return code: ${_result}" exit ${_result} +- condition: true + clusters: + - name: ${TP_CLUSTER_NAME} + script: + ignoreErrors: false + fileName: script.sh + content: | + cd ${TP_AUTO_SCRIPT_FOLDER} + python ${PYTHON_FILE_ENV_POINT} + _result=$? + echo "python return code: ${_result}" + exit ${_result} diff --git a/charts/provisioner-config-local/recipes/tp-base-on-prem-https.yaml b/charts/provisioner-config-local/recipes/tp-base-on-prem-https.yaml index bc99be4..a1a6b8b 100644 --- a/charts/provisioner-config-local/recipes/tp-base-on-prem-https.yaml +++ b/charts/provisioner-config-local/recipes/tp-base-on-prem-https.yaml @@ -32,7 +32,7 @@ meta: GUI_TP_DB_PASSWORD: postgres GUI_TP_DB_NAME: postgres GUI_TP_DB_TLS_ENABLED: false - GUI_TP_INSTALL_PROVISIONER_UI: true + GUI_TP_INSTALL_PROVISIONER_UI: false GUI_TP_INSTALL_CERT_MANAGER: true GUI_TP_INSTALL_METRICS_SERVER: true GUI_PIPELINE_LOG_DEBUG: false diff --git a/dev/platform-provisioner-test.sh b/dev/platform-provisioner-test.sh index 3311a13..fa5a979 100755 --- a/dev/platform-provisioner-test.sh +++ b/dev/platform-provisioner-test.sh @@ -65,6 +65,7 @@ cd "${DEV_PATH}" || exit # default network is host [[ -z "${PIPELINE_CONTAINER_NETWORK}" ]] && export PIPELINE_CONTAINER_NETWORK="host" +[[ -z "${PIPELINE_CONTAINER_TTY}" ]] && export PIPELINE_CONTAINER_TTY="-it" [[ -z "${PIPELINE_INPUT_RECIPE}" ]] && export PIPELINE_INPUT_RECIPE="recipe.yaml" [[ -z "${PIPELINE_TRIGGER_RUN_SH}" ]] && export PIPELINE_TRIGGER_RUN_SH="true" @@ -135,7 +136,7 @@ export PIPELINE_INPUT_RECIPE_CONTENT="" echo "Using platform provisioner docker image: ${PIPELINE_DOCKER_IMAGE}" # is used to export functions; so subshell can use it -docker run -it --rm \ +docker run "${PIPELINE_CONTAINER_TTY}" --rm \ --name provisioner-pipeline-task \ --net "${PIPELINE_CONTAINER_NETWORK}" \ -e ACCOUNT \ diff --git a/dev/platform-provisioner.sh b/dev/platform-provisioner.sh index 6f8e825..31546dc 100755 --- a/dev/platform-provisioner.sh +++ b/dev/platform-provisioner.sh @@ -57,6 +57,7 @@ export PIPELINE_CMD_NAME_YQ="${PIPELINE_CMD_NAME_YQ:-yq4}" # default network is host [[ -z "${PIPELINE_CONTAINER_NETWORK}" ]] && export PIPELINE_CONTAINER_NETWORK="host" +[[ -z "${PIPELINE_CONTAINER_TTY}" ]] && export PIPELINE_CONTAINER_TTY="-it" [[ -z "${PIPELINE_INPUT_RECIPE}" ]] && export PIPELINE_INPUT_RECIPE="recipe.yaml" [[ -z "${PIPELINE_TRIGGER_RUN_SH}" ]] && export PIPELINE_TRIGGER_RUN_SH="true" @@ -127,7 +128,7 @@ export PIPELINE_INPUT_RECIPE_CONTENT="" echo "Using platform provisioner docker image: ${PIPELINE_DOCKER_IMAGE}" # is used to export functions; so subshell can use it -docker run -it --rm \ +docker run "${PIPELINE_CONTAINER_TTY}" --rm \ --name provisioner-pipeline-task \ --net "${PIPELINE_CONTAINER_NETWORK}" \ -e ACCOUNT \ diff --git a/docs/recipes/automation/on-perm/README.md b/docs/recipes/automation/on-perm/README.md index 9af8ea8..8629876 100644 --- a/docs/recipes/automation/on-perm/README.md +++ b/docs/recipes/automation/on-perm/README.md @@ -1,9 +1,34 @@ -# On-Premises automation +# On-Premises setup automation The goal of this automation is to create full running TP on-perm environment from scratch with one script. 
-In the on-perm use case; we assume there will be a on-perm cluster running. The default target is Docker for Desktop. +In the on-perm use case, we assume there is already an on-perm cluster running. The default target is Docker for Desktop. +## Setup flow + +### 1. Generate recipe from provisioner-config-local helm chart +```bash +./generate-recipe.sh 1 1 +``` + +### 2. Adjust recipe for your k8s environment +```bash +# choose the environment you will deploy to +./adjust-recipe.sh +``` + +### 3. (Optional) Update recipe tokens +```bash +./update-recipe-tokens.sh +``` + +### 4. Install the full TP on-perm environment +Before triggering the run.sh script, you can manually set the TP versions you want to install in the 02-tp-cp-on-perm.yaml file. +```bash +./run.sh 1 +``` + +## What happens in the run.sh script? Basically the `run.sh` script will: * Deploy on-perm tools like ingress, Postgres * Deploy TIBCO Platform Control Plane * Create admin user * Register a subscription * Setup o11y stack * Deploy a DP * Deploy a capability -We can copy the following recipes from provisioner GUI. -``` -01-tp-on-perm.yaml -02-tp-cp-on-perm.yaml -03-tp-adjust-dns.yaml -04-tp-adjust-resource.yaml -05-tp-auto-deploy-dp.yaml -``` - -Or use the `generate-recipe.sh` script to generate the skeleton and manually modify. (Adding tokens and certificates) - - ## Local development process for python automation -* Use local repo to generate recipe -* Deploy CP subscription (Admin, sub user, DP, app, etc.) - -```shell -export GITHUB_TOKEN="" -./generate-recipe.sh 2 && ./run.sh 4 -``` +* Use the local repo to generate the recipe: `./generate-recipe.sh 2 1` +* Deploy the CP subscription normally: `./run.sh 1` +* In this case the setup automation mounts the `../tp-setup/bootstrap/` folder into the automation container, so you can edit the Python automation code on your local machine and run the automation script in the container. diff --git a/docs/recipes/automation/on-perm/adjust-recipe.sh b/docs/recipes/automation/on-perm/adjust-recipe.sh index 57d9ed9..8cb19cb 100755 --- a/docs/recipes/automation/on-perm/adjust-recipe.sh +++ b/docs/recipes/automation/on-perm/adjust-recipe.sh @@ -5,6 +5,20 @@ # All Rights Reserved. Confidential & Proprietary. # +####################################### +# adjust-recipe.sh: this script will adjust the recipe for deploying TP on-prem for different k8s environments +# Globals: +# None +# Arguments: +# 1 - 4: the choice of the environment +# Returns: +# None +# Notes: +# Ideally, use ./generate-recipe.sh to generate the recipe before adjusting it. +# Samples: +# ./adjust-recipe.sh 1 +####################################### + # This script will generate the recipe for deploying TP on-prem function adjust_recipes() { local choice="${1:-""}" if [[ -z $choice ]]; then echo "Please select an option:" echo "1. Adjust for k3s" - echo "2. Adjust for openshift" - echo "3. Exit" + echo "2. Adjust for OpenShift" + echo "3. Adjust for Docker Desktop" + echo "4. 
Exit" read -rp "Enter your choice (1-3): " choice fi @@ -39,6 +54,7 @@ function adjust_recipes() { _recipe_file_name="04-tp-adjust-resource.yaml" if [[ -f "${_recipe_file_name}" ]]; then yq eval -i '(.meta.guiEnv.GUI_TASK_REMOVE_RESOURCES = false)' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TASK_SHOW_RESOURCES = false)' "$_recipe_file_name" fi _recipe_file_name="05-tp-auto-deploy-dp.yaml" @@ -76,6 +92,7 @@ function adjust_recipes() { _recipe_file_name="04-tp-adjust-resource.yaml" if [[ -f "${_recipe_file_name}" ]]; then yq eval -i '(.meta.guiEnv.GUI_TASK_REMOVE_RESOURCES = false)' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TASK_SHOW_RESOURCES = false)' "$_recipe_file_name" fi _recipe_file_name="05-tp-auto-deploy-dp.yaml" @@ -92,6 +109,43 @@ function adjust_recipes() { break ;; 3) + echo "Adjusting for Docker Desktop..." + _recipe_file_name="01-tp-on-perm.yaml" + export TP_STORAGE_CLASS="hostpath" + if [[ -f "${_recipe_file_name}" ]]; then + yq eval -i '(.meta.guiEnv.GUI_TP_INGRESS_SERVICE_TYPE = "LoadBalancer")' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TP_STORAGE_CLASS = env(TP_STORAGE_CLASS))' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TP_STORAGE_CLASS_FOR_NFS_SERVER_PROVISIONER = env(TP_STORAGE_CLASS))' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TP_INSTALL_NFS_SERVER_PROVISIONER = false)' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TP_INSTALL_METRICS_SERVER = true)' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TP_INSTALL_PROVISIONER_UI = false)' "$_recipe_file_name" + fi + + _recipe_file_name="02-tp-cp-on-perm.yaml" + if [[ -f "${_recipe_file_name}" ]]; then + yq eval -i '(.meta.guiEnv.GUI_CP_STORAGE_CLASS = "hostpath")' "$_recipe_file_name" + fi + + _recipe_file_name="04-tp-adjust-resource.yaml" + if [[ -f "${_recipe_file_name}" ]]; then + yq eval -i '(.meta.guiEnv.GUI_TASK_REMOVE_RESOURCES = true)' "$_recipe_file_name" + yq eval -i '(.meta.guiEnv.GUI_TASK_SHOW_RESOURCES = false)' "$_recipe_file_name" + fi + + _recipe_file_name="05-tp-auto-deploy-dp.yaml" + if [[ -f "${_recipe_file_name}" ]]; then + yq eval -i '(.meta.guiEnv.GUI_TP_AUTO_USE_LOCAL_SCRIPT = false)' ${_recipe_file_name} + yq eval -i '(.meta.guiEnv.GUI_TP_AUTO_USE_GITHUB_SCRIPT = true)' ${_recipe_file_name} + yq eval -i '(.meta.guiEnv.GUI_TP_AUTO_STORAGE_CLASS = env(TP_STORAGE_CLASS))' ${_recipe_file_name} + fi + + _recipe_file_name="06-tp-o11y-stack.yaml" + if [[ -f "${_recipe_file_name}" ]]; then + yq eval -i '(.meta.guiEnv.GUI_TP_STORAGE_CLASS = env(TP_STORAGE_CLASS))' "$_recipe_file_name" + fi + break + ;; + 4) echo "Exiting..." break ;; @@ -102,11 +156,9 @@ function adjust_recipes() { done } - # main function function main() { adjust_recipes "$@" } main "$@" - diff --git a/docs/recipes/automation/on-perm/generate-recipe.sh b/docs/recipes/automation/on-perm/generate-recipe.sh index c5ccc3d..bbcd598 100755 --- a/docs/recipes/automation/on-perm/generate-recipe.sh +++ b/docs/recipes/automation/on-perm/generate-recipe.sh @@ -1,10 +1,26 @@ #!/bin/bash # -# © 2024 Cloud Software Group, Inc. +# © 2024 - 2025 Cloud Software Group, Inc. # All Rights Reserved. Confidential & Proprietary. # +####################################### +# generate-recipe.sh: this script will generate the recipe for deploying TP on-prem +# Globals: +# None +# Arguments: +# 1 - 3: the choice of the source of the recipe +# Returns: +# None +# Notes: +# The recipe comes from provisioner-config-local chart. It can be generated from public repo or local repo. 
+# The default recipe values are designed for the Docker Desktop environment. They can be adjusted for different k8s environments by running adjust-recipe.sh. +# After adjusting the recipes, we can use ./update-recipe-tokens.sh to update the tokens for private repos. +# Samples: +# ./generate-recipe.sh 1 1 +####################################### + # This script will generate the recipe for deploying TP on-prem function select_recipe_source() { local choice="${1:-""}" @@ -35,7 +51,6 @@ function select_recipe_source() { # set recipe to use local script if [[ -f ${_recipe_file_name} ]]; then echo "Update recipe ${_recipe_file_name} to use local script..." - echo "set IS_LOCAL_AUTOMATION to true to run local script" export GUI_TP_AUTO_USE_LOCAL_SCRIPT=true export GUI_TP_AUTO_USE_GITHUB_SCRIPT=false yq eval -i '(.meta.guiEnv.GUI_TP_AUTO_USE_LOCAL_SCRIPT = env(GUI_TP_AUTO_USE_LOCAL_SCRIPT))' ${_recipe_file_name} @@ -98,6 +113,7 @@ function generate_recipe() { update_05-tp-auto-deploy-dp } +# Update the recipe file 05-tp-auto-deploy-dp.yaml update_05-tp-auto-deploy-dp() { local recipe_file="05-tp-auto-deploy-dp.yaml" @@ -121,6 +137,7 @@ update_05-tp-auto-deploy-dp() { done } +# Check if yq is installed check_yq() { if ! command -v yq &> /dev/null; then echo "Error: yq is not installed. Please install yq before running this script." diff --git a/docs/recipes/automation/on-perm/run.sh b/docs/recipes/automation/on-perm/run.sh index addfd6f..a2ca1f6 100755 --- a/docs/recipes/automation/on-perm/run.sh +++ b/docs/recipes/automation/on-perm/run.sh @@ -1,33 +1,33 @@ #!/bin/bash # -# © 2024 Cloud Software Group, Inc. +# © 2024 - 2025 Cloud Software Group, Inc. # All Rights Reserved. Confidential & Proprietary. # -# run with local platform-provisioner.sh script with export PIPELINE_SCRIPT=../../../../dev/platform-provisioner.sh +####################################### +# run.sh: this script will deploy the TP on-prem environment. +# Globals: +# TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT: the retry count for deploying CP subscription +# PIPELINE_SCRIPT: the pipeline script +# Arguments: +# 1 - 9: the choice of the deployment combination +# Returns: +# None +# Notes: +# This will trigger the full deployment of TP on-prem. +# The script also has options to deploy individual parts of TP on-prem. 
+# Samples: +# ./run.sh 1 +####################################### + +export CURRENT_PATH=$(pwd) -export PIPELINE_ON_PREM_KUBECONFIG_FILE_NAME=config -# don't print debug log -export PIPELINE_LOG_DEBUG=false -# don't print recipe -export PIPELINE_RECIPE_PRINT=false # retry count export TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT=${TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT:-10} - -export PIPELINE_DOCKER_IMAGE_RUNNER=${PIPELINE_DOCKER_IMAGE_RUNNER:-"ghcr.io/tibcosoftware/platform-provisioner/platform-provisioner:latest"} -export PIPELINE_DOCKER_IMAGE_TESTER=${PIPELINE_DOCKER_IMAGE_TESTER:-"ghcr.io/tibcosoftware/platform-provisioner/platform-provisioner:v1.0.0-tester"} - -export CURRENT_PATH=$(pwd) export _PIPELINE_PUBLIC_SCRIPT='/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/TIBCOSoftware/platform-provisioner/main/dev/platform-provisioner.sh)"' export PIPELINE_SCRIPT="${PIPELINE_SCRIPT:-${_PIPELINE_PUBLIC_SCRIPT}}" -if [[ "${IS_LOCAL_AUTOMATION}" = "true" ]]; then - PIPELINE_SCRIPT="$(realpath "../../../../dev/platform-provisioner.sh")" - echo "Using local automation script: ${PIPELINE_SCRIPT}" - export PIPELINE_CONTAINER_OPTIONAL_PARAMETER="-v $(realpath '../tp-setup/bootstrap/'):/tmp/auto" -fi - # deploy-on-prem-base deploys base on-prem function deploy-on-prem-base() { local _recipe_file_name="01-tp-on-perm.yaml" @@ -161,93 +161,118 @@ function run-with-retry() { fi } -if [[ $# -gt 0 ]]; then - choice=$1 -fi -while true; do - if [[ -z $choice ]]; then - echo "Please select an option:" - echo "1. Deploy TP from scratch. (All steps: 2,5,7,3,7,4,6)" - echo "2. Prepare TP cluster (Ingress, DB, storage, etc.)" - echo "3. Deploy platform-bootstrap and platform-base only" - echo "4. Deploy CP subscription (Admin, sub user, DP, app, etc.)" - echo "5. Deploy o11y stack (Elastic, Prometheus, OTel Collector, etc.)" - echo "6. Deploy TP o11y resources (Config, o11y, etc.)" - echo "7. Cleanup resource (Remove resource limits, etc.)" - echo "8. Undeploy o11y stack then Redeploy o11y stack (dp-config-es-es-default-0 pod is pending)" - echo "9. Exit" - read -rp "Enter your choice (1-8): " choice +# main function +function main() { + # kubeconfig file name + export PIPELINE_ON_PREM_KUBECONFIG_FILE_NAME=${PIPELINE_ON_PREM_KUBECONFIG_FILE_NAME:-config} + # don't print debug log + export PIPELINE_LOG_DEBUG=${PIPELINE_LOG_DEBUG:-false} + # don't print recipe + export PIPELINE_RECIPE_PRINT=${PIPELINE_RECIPE_PRINT:-false} + # runner image + export PIPELINE_DOCKER_IMAGE_RUNNER=${PIPELINE_DOCKER_IMAGE_RUNNER:-"ghcr.io/tibcosoftware/platform-provisioner/platform-provisioner:latest"} + # tester image + export PIPELINE_DOCKER_IMAGE_TESTER=${PIPELINE_DOCKER_IMAGE_TESTER:-"ghcr.io/tibcosoftware/platform-provisioner/platform-provisioner:v1.0.0-tester"} + + if [[ -f 05-tp-auto-deploy-dp.yaml ]]; then + _IS_LOCAL_AUTOMATION=$(yq eval '.meta.guiEnv.GUI_TP_AUTO_USE_LOCAL_SCRIPT' 05-tp-auto-deploy-dp.yaml) + if [[ "${_IS_LOCAL_AUTOMATION}" = "true" ]]; then + PIPELINE_SCRIPT="$(realpath "../../../../dev/platform-provisioner.sh")" + echo "Using local automation script: ${PIPELINE_SCRIPT}" + export PIPELINE_CONTAINER_OPTIONAL_PARAMETER="-v $(realpath '../tp-setup/bootstrap/'):/tmp/auto" + fi fi - case $choice in - 1) - echo "Deploying TP from scratch..." - start_time=$(date +%s) - deploy-on-prem-base # 2 - deploy-tp-o11y-stack # 5 - post-deploy-cleanup-resource # 7 - echo "Wait for 30 seconds before deploying CP..." 
- sleep 30 - deploy-tp # 3 - echo "Finish deploy TP in $(($(date +%s) - start_time)) seconds" - post-deploy-adjust-dns # dns adjustment - post-deploy-cleanup-resource # 7 - - echo "Wait for 30 seconds before deploying CP subscription..." - sleep 30 - - # run with retry for # 4 - run-with-retry deploy-subscription "${TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT}" - - deploy-tp-o11y-resource # 6 - - end_time=$(date +%s) - total_time=$((end_time - start_time)) - echo "Total execution time: ${total_time} seconds" - break - ;; - 2) - echo "Prepare TP cluster..." - deploy-on-prem-base - break - ;; - 3) - echo "Deploying platform-bootstrap and platform-base..." - deploy-tp - break - ;; - 4) - echo "Deploying CP subscription..." - echo "Run with $PIPELINE_SCRIPT with retry count $TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT" - run-with-retry deploy-subscription "${TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT}" - break - ;; - 5) - echo "Deploying o11y stack..." - deploy-tp-o11y-stack - break - ;; - 6) - echo "Deploying TP o11y resources..." - deploy-tp-o11y-resource - break - ;; - 7) - echo "Cleaning up resources..." - post-deploy-cleanup-resource - break - ;; - 8) - echo "Undeploy o11y stack then Redeploy o11y stack..." - redeploy-tp-o11y-stack - break - ;; - 9) - echo "Exiting..." - break - ;; - *) - echo "Invalid option. Please try again." - ;; - esac -done + if [[ $# -gt 0 ]]; then + choice=$1 + fi + while true; do + if [[ -z $choice ]]; then + echo "Please select an option:" + echo "1. Deploy TP from scratch. (All steps: 2,5,7,3,7,4,6)" + echo "2. Prepare TP cluster (Ingress, DB, storage, etc.)" + echo "3. Deploy platform-bootstrap and platform-base only" + echo "4. Deploy CP subscription (Admin, sub user, DP, app, etc.)" + echo "5. Deploy o11y stack (Elastic, Prometheus, OTel Collector, etc.)" + echo "6. Deploy TP o11y resources (Config, o11y, etc.)" + echo "7. Cleanup resource (Remove resource limits, etc.)" + echo "8. Undeploy o11y stack then Redeploy o11y stack (dp-config-es-es-default-0 pod is pending)" + echo "9. Exit" + read -rp "Enter your choice (1-8): " choice + fi + + case $choice in + 1) + echo "Deploying TP from scratch..." + start_time=$(date +%s) + deploy-on-prem-base # 2 + deploy-tp-o11y-stack # 5 + post-deploy-cleanup-resource # 7 + echo "Wait for 30 seconds before deploying CP..." + sleep 30 + deploy-tp # 3 + echo "Finish deploy TP in $(($(date +%s) - start_time)) seconds" + post-deploy-adjust-dns # dns adjustment + post-deploy-cleanup-resource # 7 + + echo "Wait for 30 seconds before deploying CP subscription..." + sleep 30 + + # run with retry for # 4 + run-with-retry deploy-subscription "${TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT}" + + deploy-tp-o11y-resource # 6 + + end_time=$(date +%s) + total_time=$((end_time - start_time)) + echo "Total execution time: ${total_time} seconds" + break + ;; + 2) + echo "Prepare TP cluster..." + deploy-on-prem-base + break + ;; + 3) + echo "Deploying platform-bootstrap and platform-base..." + deploy-tp + break + ;; + 4) + echo "Deploying CP subscription..." + echo "Run with $PIPELINE_SCRIPT with retry count $TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT" + run-with-retry deploy-subscription "${TP_SUBSCRIPTION_DEPLOY_RETRY_COUNT}" + break + ;; + 5) + echo "Deploying o11y stack..." + deploy-tp-o11y-stack + break + ;; + 6) + echo "Deploying TP o11y resources..." + deploy-tp-o11y-resource + break + ;; + 7) + echo "Cleaning up resources..." + post-deploy-cleanup-resource + break + ;; + 8) + echo "Undeploy o11y stack then Redeploy o11y stack..." 
+ redeploy-tp-o11y-stack + break + ;; + 9) + echo "Exiting..." + break + ;; + *) + echo "Invalid option. Please try again." + ;; + esac + done +} + +main "$@" diff --git a/docs/recipes/automation/on-perm/update-tokens.sh b/docs/recipes/automation/on-perm/update-recipe-tokens.sh similarity index 66% rename from docs/recipes/automation/on-perm/update-tokens.sh rename to docs/recipes/automation/on-perm/update-recipe-tokens.sh index 41abde8..30d2632 100755 --- a/docs/recipes/automation/on-perm/update-tokens.sh +++ b/docs/recipes/automation/on-perm/update-recipe-tokens.sh @@ -5,6 +5,26 @@ # All Rights Reserved. Confidential & Proprietary. # +####################################### +# update-recipe-tokens.sh: this script will update the tokens for TIBCO Platform recipes. +# Globals: +# GITHUB_TOKEN: the GitHub token +# GUI_CP_CONTAINER_REGISTRY: the JFrog Container Registry +# GUI_CP_CONTAINER_REGISTRY_REPOSITORY: the JFrog Container Registry Repository +# GUI_CP_CONTAINER_REGISTRY_USERNAME: the JFrog Container Registry Username +# GUI_CP_CONTAINER_REGISTRY_PASSWORD: the JFrog Container Registry Password +# GUI_TP_TLS_CERT: the SSL Certificate +# GUI_TP_TLS_KEY: the SSL Certificate Key +# Arguments: +# token as input (optional) +# Returns: +# None +# Notes: +# We can use the token environment variables to update the recipe files. +# Samples: +# ./update-recipe-tokens.sh +####################################### + export CURRENT_PATH=$(pwd) # set_github_token sets the GitHub token for TP private repo @@ -40,28 +60,37 @@ function set_jfrog_token() { echo "Recipe file ${_recipe_file_name} not found." return 0 fi - read -rp "Enter JFrog Container Registry (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY + + if [ -z "$GUI_CP_CONTAINER_REGISTRY" ]; then + read -rp "Enter JFrog Container Registry (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY + fi if [ -n "$GUI_CP_CONTAINER_REGISTRY" ]; then echo "Update GitHub token for ${_recipe_file_name}..." export GUI_CP_CONTAINER_REGISTRY yq eval -i '(.meta.guiEnv.GUI_CP_CONTAINER_REGISTRY = env(GUI_CP_CONTAINER_REGISTRY))' "$_recipe_file_name" fi - read -rp "Enter JFrog Container Registry Repository (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY_REPOSITORY + if [ -z "$GUI_CP_CONTAINER_REGISTRY_REPOSITORY" ]; then + read -rp "Enter JFrog Container Registry Repository (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY_REPOSITORY + fi if [ -n "$GUI_CP_CONTAINER_REGISTRY_REPOSITORY" ]; then echo "Update GitHub token for ${_recipe_file_name}..." export GUI_CP_CONTAINER_REGISTRY_REPOSITORY yq eval -i '(.meta.guiEnv.GUI_CP_CONTAINER_REGISTRY_REPOSITORY = env(GUI_CP_CONTAINER_REGISTRY_REPOSITORY))' "$_recipe_file_name" fi - read -rp "Enter JFrog Container Registry Username (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY_USERNAME + if [ -z "$GUI_CP_CONTAINER_REGISTRY_USERNAME" ]; then + read -rp "Enter JFrog Container Registry Username (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY_USERNAME + fi if [ -n "$GUI_CP_CONTAINER_REGISTRY_USERNAME" ]; then echo "Update GitHub token for ${_recipe_file_name}..." 
export GUI_CP_CONTAINER_REGISTRY_USERNAME yq eval -i '(.meta.guiEnv.GUI_CP_CONTAINER_REGISTRY_USERNAME = env(GUI_CP_CONTAINER_REGISTRY_USERNAME))' "$_recipe_file_name" fi - read -rp "Enter JFrog Container Registry Password (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY_PASSWORD + if [ -z "$GUI_CP_CONTAINER_REGISTRY_PASSWORD" ]; then + read -rp "Enter JFrog Container Registry Password (Press Enter to skip): " GUI_CP_CONTAINER_REGISTRY_PASSWORD + fi if [ -n "$GUI_CP_CONTAINER_REGISTRY_PASSWORD" ]; then echo "Update GitHub token for ${_recipe_file_name}..." export GUI_CP_CONTAINER_REGISTRY_PASSWORD @@ -78,14 +107,18 @@ function set_ssl_cert() { return 0 fi - read -rp "Enter SSL Certificate (Press Enter to skip): " GUI_TP_TLS_CERT + if [ -z "$GUI_TP_TLS_CERT" ]; then + read -rp "Enter SSL Certificate (Press Enter to skip): " GUI_TP_TLS_CERT + fi if [ -n "$GUI_TP_TLS_CERT" ]; then echo "Update GitHub token for ${_recipe_file_name}..." export GUI_TP_TLS_CERT yq eval -i '(.meta.guiEnv.GUI_TP_TLS_CERT = env(GUI_TP_TLS_CERT))' "$_recipe_file_name" fi - read -rp "Enter SSL Certificate Key (Press Enter to skip): " GUI_TP_TLS_KEY + if [ -z "$GUI_TP_TLS_KEY" ]; then + read -rp "Enter SSL Certificate Key (Press Enter to skip): " GUI_TP_TLS_KEY + fi if [ -n "$GUI_TP_TLS_KEY" ]; then echo "Update GitHub token for ${_recipe_file_name}..." export GUI_TP_TLS_KEY diff --git a/docs/recipes/automation/tp-setup/bootstrap/env.py b/docs/recipes/automation/tp-setup/bootstrap/env.py index e974ec5..8dcfaec 100644 --- a/docs/recipes/automation/tp-setup/bootstrap/env.py +++ b/docs/recipes/automation/tp-setup/bootstrap/env.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +from datetime import datetime from color_logger import ColorLogger from helper import Helper import os @@ -6,6 +7,8 @@ @dataclass(frozen=True) class EnvConfig: IS_HEADLESS = Helper.is_headless() + + RETRY_TIME = datetime.now().strftime("%Y%m%d-%H%M%S") or "" GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") or "" DP_HOST_PREFIX = os.environ.get("DP_HOST_PREFIX") or "cp-sub1" DP_USER_EMAIL = os.environ.get("DP_USER_EMAIL") or "cp-sub1@tibco.com" @@ -15,7 +18,8 @@ class EnvConfig: # automation setup TP_AUTO_CP_VERSION = os.environ.get("TP_AUTO_CP_VERSION") or Helper.get_cp_version() or "1.3" - TP_AUTO_SCREENSHOT_PATH = os.environ.get("TP_AUTO_SCREENSHOT_PATH") or os.path.join(os.getcwd(), "screenshots") + TP_AUTO_REPORT_PATH = os.environ.get("TP_AUTO_REPORT_PATH") or os.path.join(os.getcwd(), "report") + TP_AUTO_REPORT_YAML_FILE = os.environ.get("TP_AUTO_REPORT_YAML_FILE") or "report.yaml" TP_AUTO_IS_CREATE_DP = os.environ.get("TP_AUTO_IS_CREATE_DP", "false").lower() == "true" TP_AUTO_IS_CONFIG_O11Y = os.environ.get("TP_AUTO_IS_CONFIG_O11Y", "false").lower() == "true" @@ -63,6 +67,7 @@ class EnvConfig: # data plane config TP_AUTO_INGRESS_CONTROLLER = os.environ.get("TP_AUTO_INGRESS_CONTROLLER") or "nginx" + TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME = os.environ.get("TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME") or "nginx" TP_AUTO_INGRESS_CONTROLLER_BWCE = os.environ.get("TP_AUTO_INGRESS_CONTROLLER_BWCE") or f"{TP_AUTO_INGRESS_CONTROLLER}-{TP_AUTO_CP_DNS_DOMAIN_PREFIX_BWCE}" TP_AUTO_INGRESS_CONTROLLER_FLOGO = os.environ.get("TP_AUTO_INGRESS_CONTROLLER_FLOGO") or f"{TP_AUTO_INGRESS_CONTROLLER}-{TP_AUTO_CP_DNS_DOMAIN_PREFIX_FLOGO}" TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB = os.environ.get("TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB") or f"{TP_AUTO_INGRESS_CONTROLLER}-{TP_AUTO_CP_DNS_DOMAIN_PREFIX_TIBCOHUB}" @@ -78,10 +83,8 @@ class EnvConfig: # need to make sure the flogo app 
name is unique and lower case in above json file FLOGO_APP_NAME = Helper.get_app_name(FLOGO_APP_FILE_NAME) - # mutable class attributes - FLOGO_APP_STATUS = "" - def pre_check(self): + ColorLogger.info(f"Current Retry time at '{self.RETRY_TIME}'") ColorLogger.info(f"Current CP version is '{self.TP_AUTO_CP_VERSION}'") ColorLogger.info(f"Headless mode is {self.IS_HEADLESS}") if not self.GITHUB_TOKEN: @@ -114,7 +117,4 @@ def pre_check(self): if not os.environ.get("CP_ADMIN_PASSWORD"): ColorLogger.warning(f"CP_ADMIN_PASSWORD is not set, will use default: {self.CP_ADMIN_PASSWORD}") - def set_flogo_app_status(self, status: str): - object.__setattr__(self, "FLOGO_APP_STATUS", status) - ENV = EnvConfig() diff --git a/docs/recipes/automation/tp-setup/bootstrap/helper.py b/docs/recipes/automation/tp-setup/bootstrap/helper.py index b52e9c3..ae45a73 100644 --- a/docs/recipes/automation/tp-setup/bootstrap/helper.py +++ b/docs/recipes/automation/tp-setup/bootstrap/helper.py @@ -14,27 +14,6 @@ def is_headless(): return True return os.environ.get("HEADLESS", "true").lower() == "true" - @staticmethod - def download_file(file_obj, filename): - """ - Downloads a file and saves it to the 'downloads' directory. - - Args: - file_obj: The content of the file to be saved. It should have a `save_as` method. - filename (str): The name of the file to be saved. - - Returns: - str: The path to the saved file. - """ - # Create 'downloads' folder if it does not exist - steps_dir = os.path.join(os.getcwd(), "downloads") - os.makedirs(steps_dir, exist_ok=True) - # Define the full file path - file_path = os.path.join(steps_dir, filename) - # Save the file content to the specified path - file_obj.save_as(file_path) - return file_path - @staticmethod def run_shell_file(script_path): # Check if the script file exists @@ -63,9 +42,24 @@ def run_shell_file(script_path): # Handle any unexpected exceptions print(f"An unexpected error occurred: {e}") + @staticmethod + def run_command(commands): + try: + result = subprocess.run( + commands, + capture_output=True, + text=True, + check=True + ) + return result.stdout + except subprocess.CalledProcessError as e: + print(f"Error running yq command: {e}") + return None + @staticmethod def get_command_output(command): try: + command = f"{Helper.get_kube_config_path()} {command}" result = subprocess.run( command, shell=True, @@ -75,9 +69,17 @@ def get_command_output(command): ) return result.stdout.strip() # Return standard output except subprocess.CalledProcessError as e: + print(f"Failed command: {command}") print(f"Command failed with error: {e.stderr.strip()}") return None + @staticmethod + def get_kube_config_path(): + tp_auto_kubeconfig = os.environ.get("TP_AUTO_KUBECONFIG") + if tp_auto_kubeconfig: + return f"KUBECONFIG={tp_auto_kubeconfig}" + return "" + @staticmethod def get_elastic_password(): return Helper.get_command_output("kubectl get secret -n elastic-system dp-config-es-es-elastic-user -o=jsonpath='{.data.elastic}' | base64 --decode; echo") diff --git a/docs/recipes/automation/tp-setup/bootstrap/page_auth.py b/docs/recipes/automation/tp-setup/bootstrap/page_auth.py index 7276127..dc464a8 100644 --- a/docs/recipes/automation/tp-setup/bootstrap/page_auth.py +++ b/docs/recipes/automation/tp-setup/bootstrap/page_auth.py @@ -1,6 +1,7 @@ from color_logger import ColorLogger from util import Util from env import ENV +from report import ReportYaml def active_user_in_mail(email, is_admin=False): ColorLogger.info(f"Active user {email} in mail...") @@ -20,6 +21,7 @@ def 
active_user_in_mail(email, is_admin=False): response = page.goto(ENV.TP_AUTO_MAIL_URL, timeout=5000) if response and response.status == 200: print(f"URL {ENV.TP_AUTO_MAIL_URL} is accessible") + ReportYaml.set(".ENV.REPORT_TP_AUTO_MAIL", True) except Exception as e: Util.exit_error(f"An error occurred while accessing {ENV.TP_AUTO_MAIL_URL}: {e}", page, "active_user_in_mail_1.png") @@ -55,7 +57,7 @@ def active_user_in_mail(email, is_admin=False): new_page.locator("#ta-sign-in-button").click() ColorLogger.success(f"User {email} has been active in new window completed.") else: - Util.exit_error(f"Can not active Email for {email}", page, "active_user_in_mail_3.png") + Util.exit_error(f"Can not active Email for {email}", new_page, "active_user_in_mail_3.png") new_page.close() @@ -87,6 +89,7 @@ def login_admin_user(): page.locator(".pcp-page-title", has_text="Welcome").wait_for(state="visible") ColorLogger.success(f"Admin user {ENV.CP_ADMIN_EMAIL} login successful.") + ReportYaml.set(".ENV.REPORT_TP_AUTO_ADMIN", True) def logout_admin_user(): ColorLogger.info(f"Loging out admin user...") @@ -207,6 +210,7 @@ def login(page): if Util.check_dom_visibility(page, page.locator("#user-profile"), 5, 10, True): page.wait_for_selector('#user-profile') print(f"User {ENV.DP_USER_EMAIL} profile is displayed...") + ReportYaml.set(".ENV.REPORT_AUTO_ACTIVE_USER", True) ColorLogger.success("Login successful!") page.wait_for_timeout(1000) else: diff --git a/docs/recipes/automation/tp-setup/bootstrap/page_env.py b/docs/recipes/automation/tp-setup/bootstrap/page_env.py new file mode 100644 index 0000000..df380cd --- /dev/null +++ b/docs/recipes/automation/tp-setup/bootstrap/page_env.py @@ -0,0 +1,4 @@ +from util import Util + +if __name__ == "__main__": + Util.print_env_info() diff --git a/docs/recipes/automation/tp-setup/bootstrap/report.py b/docs/recipes/automation/tp-setup/bootstrap/report.py new file mode 100644 index 0000000..4912ff4 --- /dev/null +++ b/docs/recipes/automation/tp-setup/bootstrap/report.py @@ -0,0 +1,171 @@ +import os +import subprocess +from env import ENV +import json + +class ReportYamlHandler: + def __init__(self, env): + self.yaml_folder = env.TP_AUTO_REPORT_PATH + self.yaml_file_path = os.path.join(self.yaml_folder, env.TP_AUTO_REPORT_YAML_FILE) + os.makedirs(self.yaml_folder, exist_ok=True) + + if not os.path.exists(self.yaml_file_path): + with open(self.yaml_file_path, "w") as f: + f.write("\n") + + def set(self, key, value=None): + command = key if value is None else f'{key}={self.format_value(value)}' + print(f"Setting YAML key-value pair: {command}") + self._run_yq_command(["-i", command]) + + def get(self, key): + """Retrieve the value of a given key from the YAML file.""" + output = self._run_yq_command([key]) + if not output or output.strip() == "null": + return None + return output.strip() + + def set_dataplane(self, dp_name): + if dp_name in self.get_dataplanes(): + return + self.set(f""" + .dataPlane |= ( + map(select(.name != "{dp_name}")) + [{{"name": "{dp_name}"}} | select(map(.name))] + ) + """) + + def get_dataplanes(self): + dps = self.get("(.dataPlane[].name)") + return dps.split("\n") if dps else [] + + def set_dataplane_info(self, dp_name, dp_key, dp_value): + dp_value = self.format_value(dp_value) + self.set(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + ) += {{"{dp_key}": {dp_value}}} + """) + + def get_dataplane_info(self, dp_name, dp_key): + return self.get(f""" + (.dataPlane[] + | select(.name == "{dp_name}").{dp_key} + ) + """) + + def 
set_capability(self, dp_name, capability): + if dp_name in self.get_capabilities(dp_name): + return + self.set(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability + ) |= (map(select(.name != "{capability}")) + [{{"name": "{capability}"}}]) + """) + + def get_capabilities(self, dp_name): + capabilities = self.get(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability[].name + ) + """) + return capabilities.split("\n") if capabilities else [] + + def set_capability_info(self, dp_name, capability, app_key, app_value): + app_value = self.format_value(app_value) + self.set(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability[] + | select(.name == "{capability}") + ) += {{"{app_key}": {app_value}}} + """) + + def set_capability_app(self, dp_name, capability, app_name): + if app_name in self.get_capability_apps(dp_name, capability): + return + self.set(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability[] + | select(.name == "{capability}").app + ) |= (map(select(.name != "{app_name}")) + [{{"name": "{app_name}"}}]) + """) + + def get_capability_apps(self, dp_name, capability): + apps = self.get(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability[] + | select(.name == "{capability}").app[].name + ) + """) + return apps.split("\n") if apps else [] + + def set_capability_app_info(self, dp_name, capability, app_name, app_key, app_value): + app_value = self.format_value(app_value) + self.set(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability[] + | select(.name == "{capability}") + | .app[] + | select(.name == "{app_name}") + ) |= . + {{"{app_key}": {app_value}}} + """) + + def get_capability_app_info(self, dp_name, capability, app_name, app_key): + return self.get(f""" + (.dataPlane[] + | select(.name == "{dp_name}") + | .capability[] + | select(.name == "{capability}") + | .app[] + | select(.name == "{app_name}").{app_key} + ) + """) + + @staticmethod + def format_value(value): + if isinstance(value, bool): + value = "true" if value else "false" + elif isinstance(value, (int, float)): + value = str(value) + elif isinstance(value, str): + # Use json.dumps to ensure the string format is correct (with quotes) + value = json.dumps(value) + elif isinstance(value, list): + value = "[" + ", ".join(json.dumps(item) for item in value) + "]" + elif isinstance(value, dict): + # Use json.dumps to ensure the string format is correct (with quotes) + value = json.dumps(value) + + return value + + def sort_yaml_order(self): + self.set(f""" + {{"ENV": .ENV, "dataPlane": .dataPlane}} + """) + self.set(f""" + .dataPlane[] |= ( + {{"name": .name, "storage": .storage, "o11yConfig": .o11yConfig, "nginx-flogo": ."nginx-flogo", "nginx-bwce": ."nginx-bwce", "capability": .capability}} + ) + """) + + + def _run_yq_command(self, args): + """Run a yq command with the given arguments.""" + try: + result = subprocess.run( + ["yq", *args, self.yaml_file_path], + capture_output=True, + text=True, + check=True + ) + return result.stdout + except subprocess.CalledProcessError as e: + print(f"Error running yq command: {e}") + return None + +ReportYaml = ReportYamlHandler(ENV) diff --git a/docs/recipes/automation/tp-setup/bootstrap/run.py b/docs/recipes/automation/tp-setup/bootstrap/run.py index 820c7d1..81b902a 100644 --- a/docs/recipes/automation/tp-setup/bootstrap/run.py +++ b/docs/recipes/automation/tp-setup/bootstrap/run.py @@ -5,6 +5,7 @@ from util import Util from helper import Helper from env import ENV +from report import 
ReportYaml def grant_permission(permission): ColorLogger.info(f"Granting permission for {permission}...") @@ -29,7 +30,7 @@ def set_user_permission(): print("Start set user permission...") page.click("#nav-bar-menu-item-usrMgmt") page.click("#users-menu-item") - page.wait_for_selector(f'.user-name-text[id="go-to-user-details-{ENV.DP_USER_EMAIL}"]') + page.locator(f'.user-name-text[id="go-to-user-details-{ENV.DP_USER_EMAIL}"]').wait_for(state="visible") print(f"{ENV.DP_USER_EMAIL} is found.") # check if user has all permissions page.locator(f'.user-name-text[id="go-to-user-details-{ENV.DP_USER_EMAIL}"]').click() @@ -64,6 +65,7 @@ def k8s_create_dataplane(dp_name): Util.exit_error("Too many data planes, please delete some data planes first.", page, "k8s_create_dataplane.png") if page.locator('.data-plane-name', has_text=dp_name).is_visible(): + ReportYaml.set_dataplane(dp_name) ColorLogger.success(f"DataPlane '{dp_name}' is already created.") return @@ -121,18 +123,20 @@ def k8s_create_dataplane(dp_name): Util.exit_error(f"Data Plane '{dp_name}' creation failed.", page, "k8s_create_dataplane_finish.png") download_commands = page.locator("#download-commands") - command_count = download_commands.count() - commands_title = ["Namespace creation", "Service Account creation", "Cluster Registration"] + # command_count = download_commands.count() + # commands_title = ["Namespace creation", "Service Account creation", "Cluster Registration"] + commands_title = page.locator(".register-data-plane p.title").all_text_contents() + print("commands_title:", commands_title) # for different release version: 1.3 => 3 commands, 1.4 and above => 4 commands # If the command count is more than 3, add Helm Repository configuration as the first command - if command_count > 3: - commands_title.insert(0, "Helm Repository configuration") + # if command_count > 3: + # commands_title.insert(0, "Helm Repository configuration") # Execute each command dynamically based on its position - for index, description in enumerate(commands_title): - if index < command_count: - k8s_run_dataplane_command(dp_name, description, download_commands.nth(index), index + 1) + for index, step_name in enumerate(commands_title): + # if index < command_count: + k8s_run_dataplane_command(dp_name, step_name, download_commands.nth(index), index + 1) # click Done button page.click("#data-plane-finished-btn") @@ -142,7 +146,7 @@ def k8s_create_dataplane(dp_name): # verify data plane is created in the list page.wait_for_timeout(2000) - print(f"Verify Data Plane {dp_name} is created in the list") + print(f"Verifying Data Plane {dp_name} is created in the list") k8s_wait_tunnel_connected(dp_name) def k8s_run_dataplane_command(dp_name, step_name, download_selector, step): @@ -152,7 +156,7 @@ def k8s_run_dataplane_command(dp_name, step_name, download_selector, step): download_selector.click() file_name = f"{dp_name}_{step}.sh" - file_path = Helper.download_file(download_info.value, file_name) + file_path = Util.download_file(download_info.value, file_name) print(f"Run command for: {step_name}") Helper.run_shell_file(file_path) @@ -168,6 +172,7 @@ def k8s_wait_tunnel_connected(dp_name): Util.exit_error(f"DataPlane {dp_name} is not created.", page, "k8s_wait_tunnel_connected_1.png") ColorLogger.success(f"DataPlane {dp_name} is created, waiting for tunnel connected.") + ReportYaml.set_dataplane(dp_name) data_plane_card = page.locator(".data-plane-card", has=page.locator('.data-plane-name', has_text=dp_name)) print(f"Waiting for DataPlane {dp_name} tunnel 
connected...") if not Util.check_dom_visibility(page, data_plane_card.locator('.tunnel-status svg.green'), 10, 180): @@ -319,6 +324,7 @@ def o11y_config_dataplane_resource(dp_name=""): if not add_new_resource_button.is_visible(): print("'Add new resource' button is not exist...") ColorLogger.success(f"Data plane '{dp_title}' Observability Resources is already configured.") + ReportYaml.set_dataplane_info(dp_name, "o11yConfig", True) return add_new_resource_button.click() @@ -422,6 +428,7 @@ def o11y_config_dataplane_resource(dp_name=""): return ColorLogger.success(f"Data plane '{dp_title}' Observability Resources is configured.") + ReportYaml.set_dataplane_info(dp_name, "o11yConfig", True) print(f"Wait 5 seconds for Data plane '{dp_title}' configuration page redirect.") page.wait_for_timeout(5000) @@ -444,7 +451,7 @@ def o11y_new_resource_fill_form(menu_name, tab_name, tab_sub_name, name_input, d ColorLogger.info("O11y start to fill new resource form...") dp_title = dp_name if dp_name else "Global" print(f"Fill form for Data Plane: {dp_title} -> O11y-> {menu_name} -> {tab_name} ...") - page.wait_for_selector("configuration-modal .pl-modal") + page.locator("configuration-modal .pl-modal").wait_for(state="visible") page.fill("#config-name-input", name_input) page.locator("configuration-modal input.pl-select__control").click() print(f"Clicked 'Query Service type' dropdown") @@ -502,6 +509,7 @@ def dp_config_resources_storage(): page.wait_for_timeout(2000) if page.locator("#storage-resource-table tr td:first-child", has_text=resource_name).is_visible(): ColorLogger.success(f"Storage '{resource_name}' is already created.") + ReportYaml.set_dataplane_info(ENV.TP_AUTO_K8S_DP_NAME, "storage", True) else: print(f"Adding Storage '{resource_name}', and wait for 'Add Storage Class' button ...") page.locator("#add-storage-resource-btn").wait_for(state="visible") @@ -513,23 +521,28 @@ def dp_config_resources_storage(): page.fill('#description-input', resource_name) page.fill('#storageClassName-input', resource_name) print(f"Filled Storage Class, {resource_name}") - page.wait_for_timeout(1000) - page.locator("#save-storage-configuration").click() + Util.click_button_until_enabled(page, page.locator("#save-storage-configuration")) print("Clicked 'Add' button") - page.wait_for_timeout(1000) - ColorLogger.success(f"Add Storage '{resource_name}' successfully.") + if Util.check_dom_visibility(page, page.locator("#storage-resource-table tr td:first-child", has_text=resource_name), 3, 6): + ColorLogger.success(f"Add Storage '{resource_name}' successfully.") + ReportYaml.set_dataplane_info(ENV.TP_AUTO_K8S_DP_NAME, "storage", True) -def dp_config_resources_ingress(ingress_controller, resource_name, fqdn): +def dp_config_resources_ingress(ingress_controller, resource_name, ingress_class_name, fqdn): ColorLogger.info("Config Data Plane Resources Ingress...") page.locator("#resources-menu-item .menu-item-text", has_text="Resources").wait_for(state="visible") page.locator("#resources-menu-item .menu-item-text", has_text="Resources").click() print("Clicked 'Resources' left side menu") - page.locator("#toggle-ingress-expansion").click() - print("Clicked expand Icon, and wait for Ingress Controller table") - page.wait_for_timeout(5000) + page.locator("#toggle-ingress-expansion svg use").wait_for(state="visible") + expected_icon = 'pl-icon-caret-right' + if expected_icon in (page.query_selector("#toggle-ingress-expansion svg use") or {}).get_attribute("xlink:href"): + page.locator("#toggle-ingress-expansion").click() + 
print("Clicked expand Icon, and wait for Ingress Controller table") + page.wait_for_timeout(3000) + print(f"Check if Ingress Controller '{resource_name}' is exist...") if page.locator("#ingress-resource-table tr td:first-child", has_text=resource_name).is_visible(): ColorLogger.success(f"Ingress Controller '{resource_name}' is already created.") + ReportYaml.set_dataplane_info(ENV.TP_AUTO_K8S_DP_NAME, resource_name, True) else: print(f"Ingress Controller table do not have '{resource_name}'") print(f"Adding Ingress Controller '{resource_name}', and wait for 'Add Ingress Controller' button ...") @@ -548,8 +561,8 @@ def dp_config_resources_ingress(ingress_controller, resource_name, fqdn): print(f"Selected '{ingress_controller}' in Ingress Controller dropdown") page.fill('#resourceName-input', resource_name) print(f"Filled Resource Name: {resource_name}") - page.fill('#ingressClassName-input', resource_name) - print(f"Filled Ingress Class Name: {resource_name}") + page.fill('#ingressClassName-input', ingress_class_name) + print(f"Filled Ingress Class Name: {ingress_class_name}") page.fill('#fqdn-input', fqdn) print(f"Filled FQDN: {fqdn}") @@ -568,8 +581,7 @@ def dp_config_resources_ingress(ingress_controller, resource_name, fqdn): # print("Clicked 'Save' button") # page.wait_for_timeout(500) - page.wait_for_timeout(1000) - page.locator("#save-ingress-configuration").click() + Util.click_button_until_enabled(page, page.locator("#save-ingress-configuration")) print("Clicked 'Add' button") page.wait_for_timeout(1000) if page.locator(".pl-notification--error").is_visible(): @@ -578,13 +590,16 @@ def dp_config_resources_ingress(ingress_controller, resource_name, fqdn): page.locator("#cancel-ingress-configuration").click() print("Clicked 'Cancel' button") return - ColorLogger.success(f"Add Ingress Controller '{resource_name}' successfully.") + if Util.check_dom_visibility(page, page.locator("#ingress-resource-table tr td:first-child", has_text=resource_name), 3, 6): + ColorLogger.success(f"Add Ingress Controller '{resource_name}' successfully.") + ReportYaml.set_dataplane_info(ENV.TP_AUTO_K8S_DP_NAME, resource_name, True) def flogo_provision_capability(dp_name): ColorLogger.info("Flogo Provisioning capability...") goto_dataplane(dp_name) if page.locator("capability-card #flogo").is_visible(): ColorLogger.success("Flogo capability is already provisioned.") + ReportYaml.set_capability(dp_name, "flogo") return print("Checking if 'Provision a capability' button is visible.") @@ -602,10 +617,15 @@ def flogo_provision_capability(dp_name): print("Flogo capability page is loaded") page.wait_for_timeout(3000) - page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Storage Class for Flogo capability") - page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO}' Ingress Controller for Flogo capability") + if page.locator('#storage-class-resource-table').is_visible(): + page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible") + page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Storage Class for Flogo capability") + + if 
page.locator('#ingress-resource-table').is_visible(): + page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO)).locator('label').wait_for(state="visible") + page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO}' Ingress Controller for Flogo capability") page.locator("#btnNextCapabilityProvision").click() print("Clicked Flogo 'Next' button, finished step 1") @@ -626,6 +646,7 @@ def flogo_provision_capability(dp_name): page.wait_for_timeout(5000) if is_capability_provisioned("Flogo"): ColorLogger.success("Flogo capability is in capability list") + ReportYaml.set_capability(dp_name, "flogo") else: Util.warning_screenshot("Flogo capability is not in capability list", page, "flogo_provision_capability-2.png") @@ -635,7 +656,9 @@ def flogo_provision_connector(dp_name): # program will exit if Flogo capability is not provisioned yet goto_capability(dp_name, "Flogo") - print("Flogo Checking connectors...") + page.locator(".capability-connectors-container .total-capability").wait_for(state="visible") + page.wait_for_timeout(1000) + print("Flogo capability page loaded, Checking connectors...") if page.locator(".capability-connectors-container .total-capability", has_text="(2)").is_visible(): ColorLogger.success("Flogo connectors are already provisioned.") return @@ -839,6 +862,7 @@ def flogo_app_start(dp_name, app_name): is_app_running = page.locator("flogo-app-run-status .scale-status-text", has_text="Running").is_visible() or page.locator("flogo-app-run-status button", has_text="Stop").is_visible() if is_app_running: ColorLogger.success(f"Flogo app '{app_name}' is already running.") + ReportYaml.set_capability_app_info(ENV.TP_AUTO_K8S_DP_NAME, "flogo", app_name, "status", "Running") else: page.locator("flogo-app-run-status button", has_text="Start").click() print("Clicked 'Start' app button") @@ -847,6 +871,7 @@ def flogo_app_start(dp_name, app_name): if Util.check_dom_visibility(page, page.locator("flogo-app-run-status .scale-status-text", has_not_text="Scaling"), 15, 180, True): app_status = page.locator("flogo-app-run-status .scale-status-text").inner_text() ColorLogger.success(f"Flogo app '{app_name}' status is '{app_status}' now.") + ReportYaml.set_capability_app_info(ENV.TP_AUTO_K8S_DP_NAME, "flogo", app_name, "status", app_status) else: Util.warning_screenshot(f"Wait too long to scale Flogo app '{app_name}'.", page, "flogo_app_start.png") @@ -871,7 +896,7 @@ def flogo_app_test_endpoint(dp_name, app_name): new_page.wait_for_load_state() print(f"Waiting for Swagger title '{app_name}' to be displayed.") - if Util.check_dom_visibility(new_page, new_page.locator("#swagger-editor h2.title", has_text=app_name), 5, 20, True): + if Util.check_dom_visibility(new_page, new_page.locator("#swagger-editor h2.title", has_text=app_name), 5, 15, True): new_page.locator("#swagger-editor h2.title", has_text=app_name).wait_for(state="visible") print(f"The Swagger title '{app_name}' is displayed.") @@ -886,7 +911,7 @@ def flogo_app_test_endpoint(dp_name, app_name): print("Closed Swagger page") ColorLogger.success(f"Test Flogo app '{app_name}', endpoint '/flogo'") else: - Util.warning_screenshot(f"Swagger page is not loaded, title '{app_name}' is not displayed.", page, "flogo_app_test_endpoint.png") + Util.warning_screenshot(f"Swagger page is not loaded, title '{app_name}' is not displayed.", new_page, 
"flogo_app_test_endpoint.png") else: Util.warning_screenshot(f"'Test' button is not visible in Flogo app {app_name}, need to config it and start app.", page, "flogo_app_test_endpoint.png") @@ -897,6 +922,8 @@ def flogo_is_app_created(app_name): page.wait_for_timeout(3000) if page.locator("#app-list-table tr.pl-table__row td.app-name", has_text=app_name).is_visible(): ColorLogger.success(f"Flogo app '{app_name}' is already created.") + ReportYaml.set_capability(ENV.TP_AUTO_K8S_DP_NAME, "flogo") + ReportYaml.set_capability_app(ENV.TP_AUTO_K8S_DP_NAME, "flogo", app_name) return True else: print(f"Flogo app '{app_name}' has not been created.") @@ -912,7 +939,7 @@ def flogo_is_app_running(app_name): page.wait_for_timeout(3000) if page.locator("#app-list-table tr.FLOGO", has=page.locator("td.app-name", has_text=app_name)).locator("td", has_text="Running").is_visible(): ColorLogger.success(f"Flogo app '{app_name}' is already running.") - ENV.set_flogo_app_status("Running") + ReportYaml.set_capability_app_info(ENV.TP_AUTO_K8S_DP_NAME, "flogo", app_name, "status", "Running") return True else: print(f"Flogo app '{app_name}' has not been running.") @@ -926,6 +953,7 @@ def bwce_provision_capability(dp_name): goto_dataplane(dp_name) if page.locator("capability-card #bwce").is_visible(): ColorLogger.success("BWCE capability is already provisioned.") + ReportYaml.set_capability(dp_name, "bwce") return print("Checking if 'Provision a capability' button is visible.") @@ -943,8 +971,15 @@ def bwce_provision_capability(dp_name): print("BWCE capability page is loaded") page.wait_for_timeout(3000) - page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE}' Ingress Controller") + if page.locator('#storage-class-resource-table').is_visible(): + page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible") + page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Storage Class for BWCE capability") + + if page.locator('#ingress-resource-table').is_visible(): + page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE)).locator('label').wait_for(state="visible") + page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE}' Ingress Controller for BWCE capability") page.locator("#btnNextCapabilityProvision", has_text="Next").click() print("Clicked BWCE 'Next' button, finished step 1") @@ -965,6 +1000,7 @@ def bwce_provision_capability(dp_name): page.wait_for_timeout(5000) if is_capability_provisioned("BWCE"): ColorLogger.success("BWCE capability is in capability list") + ReportYaml.set_capability(dp_name, "bwce") else: Util.warning_screenshot("BWCE capability is not in capability list", page, "bwce_provision_capability-2.png") @@ -974,6 +1010,7 @@ def ems_provision_capability(dp_name, ems_server_name): goto_dataplane(dp_name) if page.locator("capability-card #ems .pl-tooltip__trigger", has_text=capability_name).is_visible(): ColorLogger.success("EMS capability is already provisioned.") + ReportYaml.set_capability(dp_name, "ems") return print("Checking if 'Provision a capability' 
button is visible.") @@ -992,11 +1029,15 @@ def ems_provision_capability(dp_name, ems_server_name): page.wait_for_timeout(3000) # step1: Resources - page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Message Storage") + if page.locator('#message-storage-resource-table').is_visible(): + page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible") + page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Message Storage") - page.locator('#log-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Log Storage") + if page.locator('#log-storage-resource-table').is_visible(): + page.locator('#log-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible") + page.locator('#log-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Log Storage") page.locator("#btnNextCapabilityProvision", has_text="Next").click() print("Clicked EMS step 1 'Next' button") @@ -1031,6 +1072,7 @@ def ems_provision_capability(dp_name, ems_server_name): page.wait_for_timeout(5000) if is_capability_provisioned("EMS", capability_name): ColorLogger.success(f"EMS capability {capability_name} is in capability list") + ReportYaml.set_capability(dp_name, "ems") else: Util.warning_screenshot(f"EMS capability {capability_name} is not in capability list", page, "ems_provision_capability-2.png") @@ -1040,6 +1082,7 @@ def pulsar_provision_capability(dp_name, pulsar_server_name): goto_dataplane(dp_name) if page.locator("capability-card #pulsar .pl-tooltip__trigger", has_text=capability_name).is_visible(): ColorLogger.success("Pulsar capability is already provisioned.") + ReportYaml.set_capability(dp_name, "pulsar") return print("Checking if 'Provision a capability' button is visible.") @@ -1058,14 +1101,20 @@ def pulsar_provision_capability(dp_name, pulsar_server_name): page.wait_for_timeout(3000) # step1: Resources - page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Message Storage") + if page.locator('#message-storage-resource-table').is_visible(): + page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible") + page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() + print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Message Storage") - page.locator('#journal-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click() - print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Journal Storage") + if page.locator('#journal-storage-resource-table').is_visible(): + page.locator('#journal-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible") + page.locator('#journal-storage-resource-table 
@@ -1031,6 +1072,7 @@ def ems_provision_capability(dp_name, ems_server_name):
     page.wait_for_timeout(5000)
     if is_capability_provisioned("EMS", capability_name):
         ColorLogger.success(f"EMS capability {capability_name} is in capability list")
+        ReportYaml.set_capability(dp_name, "ems")
     else:
         Util.warning_screenshot(f"EMS capability {capability_name} is not in capability list", page, "ems_provision_capability-2.png")

@@ -1040,6 +1082,7 @@ def pulsar_provision_capability(dp_name, pulsar_server_name):
     goto_dataplane(dp_name)
     if page.locator("capability-card #pulsar .pl-tooltip__trigger", has_text=capability_name).is_visible():
         ColorLogger.success("Pulsar capability is already provisioned.")
+        ReportYaml.set_capability(dp_name, "pulsar")
         return

     print("Checking if 'Provision a capability' button is visible.")
@@ -1058,14 +1101,20 @@ def pulsar_provision_capability(dp_name, pulsar_server_name):
     page.wait_for_timeout(3000)

     # step1: Resources
-    page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
-    print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Message Storage")
+    if page.locator('#message-storage-resource-table').is_visible():
+        page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible")
+        page.locator('#message-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
+        print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Message Storage")

-    page.locator('#journal-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
-    print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Journal Storage")
+    if page.locator('#journal-storage-resource-table').is_visible():
+        page.locator('#journal-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible")
+        page.locator('#journal-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
+        print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Journal Storage")

-    page.locator('#log-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
-    print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Log Storage")
+    if page.locator('#log-storage-resource-table').is_visible():
+        page.locator('#log-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible")
+        page.locator('#log-storage-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
+        print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Log Storage")

     page.locator("#btnNextCapabilityProvision", has_text="Next").click()
     print("Clicked Pulsar step 1 'Next' button")
@@ -1089,6 +1138,7 @@ def pulsar_provision_capability(dp_name, pulsar_server_name):

     if Util.check_dom_visibility(page, page.get_by_text("Pulsar server is provisioned and ready to use!"), 5, 60):
         ColorLogger.success("Provision Pulsar capability successful.")
+        ReportYaml.set_capability(dp_name, "pulsar")
         page.locator("#btn_go_to_dta_pln").click()
         print("Clicked 'Go Back To Data Plane Details' button")
     else:
@@ -1109,6 +1159,7 @@ def tibcohub_provision_capability(dp_name, hub_name):
     goto_dataplane(dp_name)
     if page.locator("capability-card #tibcohub .pl-tooltip__trigger", has_text=capability_name).is_visible():
         ColorLogger.success("TibcoHub capability is already provisioned.")
+        ReportYaml.set_capability(dp_name, "tibcohub")
         return

     print("Checking if 'Provision a capability' button is visible.")
@@ -1127,11 +1178,15 @@ def tibcohub_provision_capability(dp_name, hub_name):
     page.wait_for_timeout(3000)

     # step1: Resources for TibcoHub capability
-    page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
-    print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Storage Class for TibcoHub capability")
+    if page.locator('#storage-class-resource-table').is_visible():
+        page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').wait_for(state="visible")
+        page.locator('#storage-class-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_STORAGE_CLASS)).locator('label').click()
+        print(f"Selected '{ENV.TP_AUTO_STORAGE_CLASS}' Storage Class for TibcoHub capability")

-    page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB)).locator('label').click()
-    print(f"Selected '{ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB}' Ingress Controller for TibcoHub capability")
+    if page.locator('#ingress-resource-table').is_visible():
+        page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB)).locator('label').wait_for(state="visible")
+        page.locator('#ingress-resource-table tr', has=page.locator('td', has_text=ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB)).locator('label').click()
+        print(f"Selected '{ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB}' Ingress Controller for TibcoHub capability")

     page.locator("#btnNextCapabilityProvision", has_text="Next").click()
     print("Clicked TibcoHub step 1 'Next' button")
@@ -1167,6 +1222,7 @@ def tibcohub_provision_capability(dp_name, hub_name):
     page.wait_for_timeout(5000)
     if is_capability_provisioned("TibcoHub", capability_name):
         ColorLogger.success(f"TibcoHub capability {capability_name} is in capability list")
+        ReportYaml.set_capability(dp_name, "tibcohub")
     else:
         ColorLogger.warning(f"TibcoHub capability {capability_name} is not in capability list")
@@ -1191,11 +1247,20 @@ def tibcohub_provision_capability(dp_name, hub_name):
     goto_dataplane_config()
     dp_config_resources_storage()
     if ENV.TP_AUTO_IS_PROVISION_BWCE:
-        dp_config_resources_ingress(ENV.TP_AUTO_INGRESS_CONTROLLER, ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE, ENV.TP_AUTO_FQDN_BWCE)
+        dp_config_resources_ingress(
+            ENV.TP_AUTO_INGRESS_CONTROLLER, ENV.TP_AUTO_INGRESS_CONTROLLER_BWCE,
+            ENV.TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME, ENV.TP_AUTO_FQDN_BWCE
+        )
     if ENV.TP_AUTO_IS_PROVISION_FLOGO:
-        dp_config_resources_ingress(ENV.TP_AUTO_INGRESS_CONTROLLER, ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO, ENV.TP_AUTO_FQDN_FLOGO)
+        dp_config_resources_ingress(
+            ENV.TP_AUTO_INGRESS_CONTROLLER, ENV.TP_AUTO_INGRESS_CONTROLLER_FLOGO,
+            ENV.TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME, ENV.TP_AUTO_FQDN_FLOGO
+        )
     if ENV.TP_AUTO_IS_PROVISION_TIBCOHUB:
-        dp_config_resources_ingress(ENV.TP_AUTO_INGRESS_CONTROLLER, ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB, ENV.TP_AUTO_FQDN_TIBCOHUB)
+        dp_config_resources_ingress(
+            ENV.TP_AUTO_INGRESS_CONTROLLER, ENV.TP_AUTO_INGRESS_CONTROLLER_TIBCOHUB,
+            ENV.TP_AUTO_INGRESS_CONTROLLER_CLASS_NAME, ENV.TP_AUTO_FQDN_TIBCOHUB
+        )
     o11y_config_dataplane_resource(ENV.TP_AUTO_K8S_DP_NAME)

     # for provision BWCE capability
@@ -1247,4 +1312,4 @@ def tibcohub_provision_capability(dp_name, hub_name):
         Util.exit_error(f"Unhandled error: {e}", page, "unhandled_error_run.png")

     Util.browser_close()
-    Util.print_env_info()
+    Util.print_env_info(False)
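The util.py changes below add Util.wait_for_success_message, which polls until either a success toast or an error notification shows up. A typical call site, shown here only as an illustration (it is not taken from the patch), would pair it with a click:

    # Illustrative call site only; Util and ColorLogger come from the automation's own modules.
    from util import Util
    from color_logger import ColorLogger

    def save_and_confirm(page):
        page.locator("#save-ingress-configuration").click()
        if Util.wait_for_success_message(page, timeout=30):
            ColorLogger.success("Configuration saved.")
            return True
        Util.warning_screenshot("Save did not complete in time.", page, "save_failed.png")
        return False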
occurred: {e}") + pass + time.sleep(0.5) + + @staticmethod + def download_file(file_obj, filename): + """ + Downloads a file and saves it to the 'dp_commands' directory. + + Args: + file_obj: The content of the file to be saved. It should have a `save_as` method. + filename (str): The name of the file to be saved. + + Returns: + str: The path to the saved file. + """ + # Create 'dp_commands' folder if it does not exist + steps_dir = os.path.join(ENV.TP_AUTO_REPORT_PATH, "dp_commands") + if not os.path.exists(steps_dir): + os.makedirs(steps_dir, exist_ok=True) + # Define the full file path + file_path = os.path.join(steps_dir, filename) + # Save the file content to the specified path + file_obj.save_as(file_path) + print(f"File saved to {file_path}") + return file_path + @staticmethod def exit_error(message, page=None, filename=""): if page is not None: @@ -70,6 +121,20 @@ def refresh_page(page): @staticmethod def print_env_info(is_print_auth=True, is_print_dp=True): + ReportYaml.set(".ENV.CP_MAIL_URL", ENV.TP_AUTO_MAIL_URL) + ReportYaml.set(".ENV.CP_ADMIN_URL", ENV.TP_AUTO_ADMIN_URL) + ReportYaml.set(".ENV.CP_ADMIN_USER", ENV.CP_ADMIN_EMAIL) + ReportYaml.set(".ENV.CP_ADMIN_PASSWORD", ENV.CP_ADMIN_PASSWORD) + ReportYaml.set(".ENV.CP_URL", ENV.TP_AUTO_LOGIN_URL) + ReportYaml.set(".ENV.CP_USER", ENV.DP_USER_EMAIL) + ReportYaml.set(".ENV.CP_PASSWORD", ENV.DP_USER_PASSWORD) + ReportYaml.set(".ENV.ELASTIC_URL", ENV.TP_AUTO_ELASTIC_URL) + ReportYaml.set(".ENV.KIBANA_URL", ENV.TP_AUTO_KIBANA_URL) + ReportYaml.set(".ENV.ELASTIC_USER", ENV.TP_AUTO_ELASTIC_USER) + ReportYaml.set(".ENV.ELASTIC_PASSWORD", ENV.TP_AUTO_ELASTIC_PASSWORD) + ReportYaml.set(".ENV.PROMETHEUS_URL", ENV.TP_AUTO_PROMETHEUS_URL) + ReportYaml.set(".ENV.PROMETHEUS_USER", ENV.TP_AUTO_PROMETHEUS_USER) + ReportYaml.set(".ENV.PROMETHEUS_PASSWORD", ENV.TP_AUTO_PROMETHEUS_PASSWORD) str_num = 90 col_space = 28 print("=" * str_num) @@ -79,11 +144,11 @@ def print_env_info(is_print_auth=True, is_print_dp=True): print("-" * str_num) print(f"{'Mail URL:':<{col_space}}{ENV.TP_AUTO_MAIL_URL}") print("-" * str_num) - print(f"{'Admin URL:':<{col_space}}{ENV.TP_AUTO_ADMIN_URL}") + print(f"{'CP Admin URL:':<{col_space}}{ENV.TP_AUTO_ADMIN_URL}") print(f"{'Admin Email:':<{col_space}}{ENV.CP_ADMIN_EMAIL}") print(f"{'Admin Password:':<{col_space}}{ENV.CP_ADMIN_PASSWORD}") print("-" * str_num) - print(f"{'Login URL:':<{col_space}}{ENV.TP_AUTO_LOGIN_URL}") + print(f"{'CP Login URL:':<{col_space}}{ENV.TP_AUTO_LOGIN_URL}") print(f"{'User Email:':<{col_space}}{ENV.DP_USER_EMAIL}") print(f"{'User Password:':<{col_space}}{ENV.DP_USER_PASSWORD}") if is_print_dp: @@ -101,25 +166,33 @@ def print_env_info(is_print_auth=True, is_print_dp=True): if ENV.TP_AUTO_PROMETHEUS_PASSWORD != "": print(f"{'User Password:':<{col_space}}{ENV.TP_AUTO_PROMETHEUS_PASSWORD}") print("-" * str_num) - if ENV.TP_AUTO_IS_CREATE_DP: + dp_names = ReportYaml.get_dataplanes() + if len(dp_names) > 0: print(f"{'Data Plane, App': ^{str_num}}") print("-" * str_num) - print(f"{'DataPlane Name:':<{col_space}}{ENV.TP_AUTO_K8S_DP_NAME}") - if ENV.TP_AUTO_IS_CONFIG_O11Y: - print(f"{'DataPlane Configured:':<{col_space}}{ENV.TP_AUTO_IS_CONFIG_O11Y}") - - print(f"{'Provisioned capabilities:':<{col_space}}" - f"{'BWCE ' if ENV.TP_AUTO_IS_PROVISION_BWCE else ''}" - f"{'EMS ' if ENV.TP_AUTO_IS_PROVISION_EMS else ''}" - f"{'Flogo ' if ENV.TP_AUTO_IS_PROVISION_FLOGO else ''}" - f"{'Pulsar ' if ENV.TP_AUTO_IS_PROVISION_PULSAR else ''}" - f"{'TibcoHub' if ENV.TP_AUTO_IS_PROVISION_TIBCOHUB else ''}" - ) - - if 
@@ -101,25 +166,33 @@ def print_env_info(is_print_auth=True, is_print_dp=True):
             if ENV.TP_AUTO_PROMETHEUS_PASSWORD != "":
                 print(f"{'User Password:':<{col_space}}{ENV.TP_AUTO_PROMETHEUS_PASSWORD}")
             print("-" * str_num)
-        if ENV.TP_AUTO_IS_CREATE_DP:
+        dp_names = ReportYaml.get_dataplanes()
+        if len(dp_names) > 0:
             print(f"{'Data Plane, App': ^{str_num}}")
             print("-" * str_num)
-            print(f"{'DataPlane Name:':<{col_space}}{ENV.TP_AUTO_K8S_DP_NAME}")
-            if ENV.TP_AUTO_IS_CONFIG_O11Y:
-                print(f"{'DataPlane Configured:':<{col_space}}{ENV.TP_AUTO_IS_CONFIG_O11Y}")
-
-            print(f"{'Provisioned capabilities:':<{col_space}}"
-                  f"{'BWCE ' if ENV.TP_AUTO_IS_PROVISION_BWCE else ''}"
-                  f"{'EMS ' if ENV.TP_AUTO_IS_PROVISION_EMS else ''}"
-                  f"{'Flogo ' if ENV.TP_AUTO_IS_PROVISION_FLOGO else ''}"
-                  f"{'Pulsar ' if ENV.TP_AUTO_IS_PROVISION_PULSAR else ''}"
-                  f"{'TibcoHub' if ENV.TP_AUTO_IS_PROVISION_TIBCOHUB else ''}"
-                  )
-
-            if ENV.TP_AUTO_IS_PROVISION_FLOGO:
-                print(f"{'Flogo App Name:':<{col_space}}{ENV.FLOGO_APP_NAME}")
-                if ENV.FLOGO_APP_STATUS != "":
-                    print(f"{'Flogo App Status:':<{col_space}}{ENV.FLOGO_APP_STATUS}")
+            for dp_name in dp_names:
+                print(f"{'DataPlane Name:':<{col_space}}{dp_name}")
+
+                is_config_o11y = ReportYaml.get_dataplane_info(dp_name, "o11yConfig")
+                if is_config_o11y == "True":
+                    print(f"{'DataPlane Configured:':<{col_space}}{is_config_o11y}")
+
+                dp_capabilities = ReportYaml.get_capabilities(dp_name)
+                if len(dp_capabilities) > 0:
+                    print(f"{'Provisioned capabilities:':<{col_space}}"
+                          f"{[cap.upper() for cap in dp_capabilities]}"
+                          )
+
+                    for dp_capability in dp_capabilities:
+                        app_names = ReportYaml.get_capability_apps(dp_name, dp_capability)
+                        if len(app_names) > 0:
+                            print(f"{dp_capability.capitalize()}")
+
+                            for app_name in app_names:
+                                app_status = ReportYaml.get_capability_app_info(dp_name, dp_capability, app_name, "status")
+                                print(f"{' App Name:':<{col_space}}{app_name}")
+                                if app_status:
+                                    print(f"{' App Status:':<{col_space}}{app_status}")
         print("=" * str_num)

     @staticmethod