From 0d9ac14864829fab151a0267a0e97de7c47b97e2 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 13:45:31 +0100 Subject: [PATCH 01/56] feat: mirror filecoin built-in actors Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 51 +++++++++++ scripts/mirror-builtin-actors.sh | 93 +++++++++++++++++++++ 2 files changed, 144 insertions(+) create mode 100644 .github/workflows/mirror-builtin-actors.yml create mode 100755 scripts/mirror-builtin-actors.sh diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml new file mode 100644 index 000000000..3b7075c55 --- /dev/null +++ b/.github/workflows/mirror-builtin-actors.yml @@ -0,0 +1,51 @@ +name: Mirror Builtin Actors Releases + +on: + schedule: + - cron: '0 * * * *' # Runs every hour + pull_request: + paths: + - 'script/mirror-builtin-actors.sh' + push: + paths: + - 'script/mirror-builtin-actors.sh' + workflow_dispatch: + +jobs: + mirror-releases-do: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Set up S3cmd cli tool for DigitalOcean + uses: s3-actions/s3cmd@v1.5.0 + with: + provider: digitalocean + access_key: ${{ secrets.AWS_ACCESS_KEY_ID }} + secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + + - name: Run mirroring script to DigitalOcean + working-directory: script + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + run: bash ./mirror-builtin-actors.sh + + mirror-releases-cf: + needs: mirror-releases-do + runs-on: ubuntu-latest + steps: + - name: Checkout source code + uses: actions/checkout@v2 + + - name: Set up S3cmd cli tool for Cloudflare + uses: s3-actions/s3cmd@v1.5.0 + with: + provider: cloudflare + access_key: ${{ secrets.R2_ACCESS_KEY }} + secret_key: ${{ secrets.R2_SECRET_KEY }} + account_id: 2238a825c5aca59233eab1f221f7aefb + + - name: Run mirroring script to Cloudflare + working-directory: script 
+ if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh new file mode 100755 index 000000000..b19eabc0d --- /dev/null +++ b/scripts/mirror-builtin-actors.sh @@ -0,0 +1,93 @@ +#!/bin/bash + +# This script automates the process of mirroring the latest releases of FILEcoin's builtin-actors. +# It downloads the latest release assets from GitHub and compares them with the existing ones in an S3 bucket. +# If there are new or updated assets, the script uploads them to the bucket and send alerts to Slack. + +set -eo pipefail + +export DEBIAN_FRONTEND=noninteractive + +apt-get -qqq --yes install wget + +S3_BUCKET="filecoin-builtin-actors" +BASE_FOLDER="releases/actors" + +# Move to the base folder +mkdir -p "$BASE_FOLDER" +cd "$BASE_FOLDER" + +# Set the GitHub API URL for the latest release +API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases/latest" + +# Use curl to fetch the latest release data +ASSETS=$(curl -sS $API_URL | jq -c '.assets[]') + +# Check if assets are available +if [ -z "$ASSETS" ]; then + echo "No assets found for the latest release." + exit 1 +fi + +# Download ASSETS from GitHub +echo "$ASSETS" | while read -r asset; do + DOWNLOAD_URL=$(echo "$asset" | jq -r '.browser_download_url') + FILE_NAME=$(echo "$asset" | jq -r '.name') + + if [ ! -f "$FILE_NAME" ]; then + echo "Downloading $FILE_NAME..." 
+ wget -q "$DOWNLOAD_URL" -O "$FILE_NAME" + fi +done + +# Initialize arrays for tracking uploads +declare -a successful_uploads +declare -a failed_uploads + +# Function to send Slack alert with summary +send_slack_alert_with_summary() { + local success_list="${successful_uploads[*]}" + local failure_list="${failed_uploads[*]}" + local message="Builtin-actors assets upload summary:\nāœ… Successful: $success_list\nšŸ”„ Failed: $failure_list" + + curl -X POST -H 'Content-type: application/json' -H "Authorization: Bearer $SLACK_API_TOKEN" \ + --data "{\"channel\":\"#forest-dump\",\"text\":\"${message}\"}" \ + https://slack.com/api/chat.postMessage +} + +# Loop through all files in the current directory +for file in *; do + if [ -f "$file" ]; then + echo "Checking $file against S3 version..." + + # Create a temporary directory for the S3 download + TEMP_S3_DIR=$(mktemp -d) + + # Download the file from S3 to the temporary location + s3cmd get --no-progress "s3://$S3_BUCKET/actors/$file" "$TEMP_S3_DIR/$file" || true + + # Compare the local FILE with the downloaded FILE + if cmp --silent "$file" "$TEMP_S3_DIR/$file"; then + echo "$file is the same in S3, skipping..." + rm -rf "$file" + else + echo "Local $file is different. Uploading to S3..." + if s3cmd --acl-public put --no-progress "$file" "s3://$S3_BUCKET/actors/$file"; then + echo "Uploaded $file to s3://$S3_BUCKET/actors/$file" + successful_uploads+=("$file") + else + echo "Failed to upload $file." + failed_uploads+=("$file") + fi + fi + + rm -rf "$TEMP_S3_DIR" + fi +done + +# Send summary alert at the end only if there were uploads or failures +if [ ${#successful_uploads[@]} -ne 0 ] || [ ${#failed_uploads[@]} -ne 0 ]; then + send_slack_alert_with_summary +else + echo "No new mirroring uploads or failures, no Slack alert needed." 
+fi From 2bc2381b81b90820ec4d1908d64f2b6c2c87876f Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 13:51:30 +0100 Subject: [PATCH 02/56] make bucket name configurable Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 ++-- scripts/mirror-builtin-actors.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 3b7075c55..c064f7815 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -28,7 +28,7 @@ jobs: - name: Run mirroring script to DigitalOcean working-directory: script if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: bash ./mirror-builtin-actors.sh + run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors mirror-releases-cf: needs: mirror-releases-do @@ -48,4 +48,4 @@ jobs: - name: Run mirroring script to Cloudflare working-directory: script if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors + run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index b19eabc0d..4f62114ca 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -10,7 +10,7 @@ export DEBIAN_FRONTEND=noninteractive apt-get -qqq --yes install wget -S3_BUCKET="filecoin-builtin-actors" +S3_BUCKET="$1" BASE_FOLDER="releases/actors" # Move to the base folder From 8e03ceadc1f8fe9e6a72872422a27407b32fbd12 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:00:38 +0100 Subject: [PATCH 03/56] test cloudflare Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index c064f7815..cfa62786d 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -4,9 +4,13 @@ on: schedule: - cron: '0 * * * *' # Runs every hour pull_request: + branches: + - main paths: - 'script/mirror-builtin-actors.sh' push: + branches: + - main paths: - 'script/mirror-builtin-actors.sh' workflow_dispatch: From c3f2dbf4ab4e12789a41be4bb183199f433ec27a Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:10:07 +0100 Subject: [PATCH 04/56] test deploy cloudflare Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index cfa62786d..414ceb623 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -1,8 +1,8 @@ name: Mirror Builtin Actors Releases on: - schedule: - - cron: '0 * * * *' # Runs every hour +# schedule: +# - cron: '0 * * * *' # Runs every hour pull_request: branches: - main From 817563d50898a1b754cc581c67ce850b243e8fe0 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:17:31 +0100 Subject: [PATCH 05/56] add slack token env and also make slack channel configurable Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 13 +++++++------ scripts/mirror-builtin-actors.sh | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 414ceb623..adefaee5a 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -1,16 +1,11 @@ name: Mirror Builtin Actors Releases - on: # schedule: # - cron: '0 * * * *' # Runs every hour pull_request: - branches: - - main paths: 
- 'script/mirror-builtin-actors.sh' push: - branches: - - main paths: - 'script/mirror-builtin-actors.sh' workflow_dispatch: @@ -33,6 +28,9 @@ jobs: working-directory: script if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors + env: + SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} + mirror-releases-cf: needs: mirror-releases-do @@ -47,9 +45,12 @@ jobs: provider: cloudflare access_key: ${{ secrets.R2_ACCESS_KEY }} secret_key: ${{ secrets.R2_SECRET_KEY }} - account_id: 2238a825c5aca59233eab1f221f7aefb + account_id: "2238a825c5aca59233eab1f221f7aefb" - name: Run mirroring script to Cloudflare working-directory: script if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors + env: + SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} + SLACK_CHANNEL: "#forest-dump" diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 4f62114ca..be9063501 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -51,7 +51,7 @@ send_slack_alert_with_summary() { local message="Builtin-actors assets upload summary:\nāœ… Successful: $success_list\nšŸ”„ Failed: $failure_list" curl -X POST -H 'Content-type: application/json' -H "Authorization: Bearer $SLACK_API_TOKEN" \ - --data "{\"channel\":\"#forest-dump\",\"text\":\"${message}\"}" \ + --data "{\"channel\":\"#SLACK_CHANNEL\",\"text\":\"${message}\"}" \ https://slack.com/api/chat.postMessage } From b67964a54cf513f946758c5dd5a845994a7efd49 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:27:06 +0100 Subject: [PATCH 06/56] test deploy cloudflare Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index adefaee5a..2d438fd61 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -4,10 +4,10 @@ on: # - cron: '0 * * * *' # Runs every hour pull_request: paths: - - 'script/mirror-builtin-actors.sh' + - 'scripts/mirror-builtin-actors.sh' push: paths: - - 'script/mirror-builtin-actors.sh' + - 'scripts/mirror-builtin-actors.sh' workflow_dispatch: jobs: From a42ee6b01dae9e22aa9edd6f6a61b556c6f7f00b Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:28:35 +0100 Subject: [PATCH 07/56] test deploy cloudflare Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 2d438fd61..a687b79ea 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -26,7 +26,7 @@ jobs: - name: Run mirroring script to DigitalOcean working-directory: script - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} @@ -49,7 +49,7 @@ jobs: - name: Run mirroring script to Cloudflare working-directory: script - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} From 8bdb6f4aa935b431d363172cdd32e53b43348d65 Mon Sep 17 
00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:31:24 +0100 Subject: [PATCH 08/56] test deploy mirror actors Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index a687b79ea..5c03df515 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -25,15 +25,15 @@ jobs: secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - name: Run mirroring script to DigitalOcean - working-directory: script + working-directory: scripts #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} + SLACK_CHANNEL: "#forest-dump" mirror-releases-cf: - needs: mirror-releases-do runs-on: ubuntu-latest steps: - name: Checkout source code @@ -48,7 +48,7 @@ jobs: account_id: "2238a825c5aca59233eab1f221f7aefb" - name: Run mirroring script to Cloudflare - working-directory: script + working-directory: scripts #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors env: From b9e17ee68a6f6fbf3e7fef93e0cbc6f143024791 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 14:35:35 +0100 Subject: [PATCH 09/56] remove wget download Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index be9063501..dad5d43f3 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -8,8 +8,6 @@ set -eo pipefail export DEBIAN_FRONTEND=noninteractive -apt-get -qqq --yes install wget - S3_BUCKET="$1" 
BASE_FOLDER="releases/actors" From df59ea1fa17550271e5b51d27217724ea12e5a2c Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 16:01:28 +0100 Subject: [PATCH 10/56] fix region upload error Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 +++- scripts/mirror-builtin-actors.sh | 8 ++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 5c03df515..935d3e618 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -21,6 +21,7 @@ jobs: uses: s3-actions/s3cmd@v1.5.0 with: provider: digitalocean + region: fra1 access_key: ${{ secrets.AWS_ACCESS_KEY_ID }} secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -43,6 +44,7 @@ jobs: uses: s3-actions/s3cmd@v1.5.0 with: provider: cloudflare + region: auto access_key: ${{ secrets.R2_ACCESS_KEY }} secret_key: ${{ secrets.R2_SECRET_KEY }} account_id: "2238a825c5aca59233eab1f221f7aefb" @@ -53,4 +55,4 @@ jobs: run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL: "#forest-dump" + SLACK_CHANNEL: "#orest-dumps" diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index dad5d43f3..fddd40b4b 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -49,7 +49,7 @@ send_slack_alert_with_summary() { local message="Builtin-actors assets upload summary:\nāœ… Successful: $success_list\nšŸ”„ Failed: $failure_list" curl -X POST -H 'Content-type: application/json' -H "Authorization: Bearer $SLACK_API_TOKEN" \ - --data "{\"channel\":\"#SLACK_CHANNEL\",\"text\":\"${message}\"}" \ + --data "{\"channel\":\"$SLACK_CHANNEL\",\"text\":\"${message}\"}" \ https://slack.com/api/chat.postMessage } @@ -62,7 +62,7 @@ for file in *; do TEMP_S3_DIR=$(mktemp -d) # Download the file from S3 to the temporary location 
- s3cmd get --no-progress "s3://$S3_BUCKET/actors/$file" "$TEMP_S3_DIR/$file" || true + s3cmd get --no-progress "s3://$S3_BUCKET/$file" "$TEMP_S3_DIR/$file" --region auto || true # Compare the local FILE with the downloaded FILE if cmp --silent "$file" "$TEMP_S3_DIR/$file"; then @@ -70,8 +70,8 @@ for file in *; do rm -rf "$file" else echo "Local $file is different. Uploading to S3..." - if s3cmd --acl-public put --no-progress "$file" "s3://$S3_BUCKET/actors/$file"; then - echo "Uploaded $file to s3://$S3_BUCKET/actors/$file" + if s3cmd --acl-public put --no-progress "$file" "s3://$S3_BUCKET/$file"; then + echo "Uploaded $file to s3://$S3_BUCKET/$file" successful_uploads+=("$file") else echo "Failed to upload $file." From 531899e8c8d146d4153b006c7026926e8e75c571 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 16:17:06 +0100 Subject: [PATCH 11/56] use different enviroment for slack alert Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 7 +++++-- scripts/mirror-builtin-actors.sh | 9 ++++----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 935d3e618..e806d5c08 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -32,7 +32,8 @@ jobs: env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} SLACK_CHANNEL: "#forest-dump" - + ENVIROMENT: digitalocean + BUCKET_NAME: filecoin-builtin-actors mirror-releases-cf: runs-on: ubuntu-latest @@ -55,4 +56,6 @@ jobs: run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL: "#orest-dumps" + SLACK_CHANNEL: "#forest-dumps" + ENVIROMENT: cloudflare + BUCKET_NAME: filecoin-builtin-actors diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index fddd40b4b..3125440f5 100755 --- a/scripts/mirror-builtin-actors.sh +++ 
b/scripts/mirror-builtin-actors.sh @@ -8,7 +8,6 @@ set -eo pipefail export DEBIAN_FRONTEND=noninteractive -S3_BUCKET="$1" BASE_FOLDER="releases/actors" # Move to the base folder @@ -46,7 +45,7 @@ declare -a failed_uploads send_slack_alert_with_summary() { local success_list="${successful_uploads[*]}" local failure_list="${failed_uploads[*]}" - local message="Builtin-actors assets upload summary:\nāœ… Successful: $success_list\nšŸ”„ Failed: $failure_list" + local message="$ENVIROMENT builtin-actors assets upload summary:\nāœ… Successful: $success_list\nšŸ”„ Failed: $failure_list" curl -X POST -H 'Content-type: application/json' -H "Authorization: Bearer $SLACK_API_TOKEN" \ --data "{\"channel\":\"$SLACK_CHANNEL\",\"text\":\"${message}\"}" \ @@ -62,7 +61,7 @@ for file in *; do TEMP_S3_DIR=$(mktemp -d) # Download the file from S3 to the temporary location - s3cmd get --no-progress "s3://$S3_BUCKET/$file" "$TEMP_S3_DIR/$file" --region auto || true + s3cmd get --no-progress "s3://$BUCKET_NAME/$file" "$TEMP_S3_DIR/$file" --region auto || true # Compare the local FILE with the downloaded FILE if cmp --silent "$file" "$TEMP_S3_DIR/$file"; then @@ -70,8 +69,8 @@ for file in *; do rm -rf "$file" else echo "Local $file is different. Uploading to S3..." - if s3cmd --acl-public put --no-progress "$file" "s3://$S3_BUCKET/$file"; then - echo "Uploaded $file to s3://$S3_BUCKET/$file" + if s3cmd --acl-public put --no-progress "$file" "s3://$BUCKET_NAME/$file"; then + echo "Uploaded $file to s3://$BUCKET_NAME/$file" successful_uploads+=("$file") else echo "Failed to upload $file." 
From b8f58cd815eed50194ecaf5cacae3d8b2fca62e1 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 16:19:02 +0100 Subject: [PATCH 12/56] revert test deploy Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index e806d5c08..673bfc12e 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -1,7 +1,7 @@ name: Mirror Builtin Actors Releases on: -# schedule: -# - cron: '0 * * * *' # Runs every hour + schedule: + - cron: '0 * * * *' # Runs every hour pull_request: paths: - 'scripts/mirror-builtin-actors.sh' @@ -27,8 +27,8 @@ jobs: - name: Run mirroring script to DigitalOcean working-directory: scripts - #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + run: bash ./mirror-builtin-actors.sh env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} SLACK_CHANNEL: "#forest-dump" @@ -52,10 +52,10 @@ jobs: - name: Run mirroring script to Cloudflare working-directory: scripts - #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: bash ./mirror-builtin-actors.sh filecoin-builtin-actors/actors + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + run: bash ./mirror-builtin-actors.sh env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL: "#forest-dumps" + SLACK_CHANNEL: "#forest-dump" ENVIROMENT: cloudflare BUCKET_NAME: filecoin-builtin-actors From c48bb13470ee6faf1a7a623d6a0688393cdcfe78 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo 
Date: Thu, 16 Nov 2023 16:20:23 +0100 Subject: [PATCH 13/56] shorten slack message Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 3125440f5..2bd199085 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -86,5 +86,5 @@ done if [ ${#successful_uploads[@]} -ne 0 ] || [ ${#failed_uploads[@]} -ne 0 ]; then send_slack_alert_with_summary else - echo "No new mirroring uploads or failures, no Slack alert needed." + echo "No new mirroring uploads or failures" fi From 0fd16fb7a568daf9427b78d3cf13a69ccd5648f2 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 16:21:56 +0100 Subject: [PATCH 14/56] nits: naming Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 2bd199085..1280b9ee6 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -8,11 +8,11 @@ set -eo pipefail export DEBIAN_FRONTEND=noninteractive -BASE_FOLDER="releases/actors" +RELEASE_FOLDER="releases/actors" # Move to the base folder -mkdir -p "$BASE_FOLDER" -cd "$BASE_FOLDER" +mkdir -p "$RELEASE_FOLDER" +cd "$RELEASE_FOLDER" # Set the GitHub API URL for the latest release API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases/latest" @@ -63,7 +63,7 @@ for file in *; do # Download the file from S3 to the temporary location s3cmd get --no-progress "s3://$BUCKET_NAME/$file" "$TEMP_S3_DIR/$file" --region auto || true - # Compare the local FILE with the downloaded FILE + # Compare the local file with the downloaded file if cmp --silent "$file" "$TEMP_S3_DIR/$file"; then echo "$file is the same in S3, skipping..." 
rm -rf "$file" From db154d1180e7694ba5d1d28328aed2313624b66f Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Thu, 16 Nov 2023 16:27:41 +0100 Subject: [PATCH 15/56] nits: remove trailing region Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 1280b9ee6..e397615d5 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -61,7 +61,7 @@ for file in *; do TEMP_S3_DIR=$(mktemp -d) # Download the file from S3 to the temporary location - s3cmd get --no-progress "s3://$BUCKET_NAME/$file" "$TEMP_S3_DIR/$file" --region auto || true + s3cmd get --no-progress "s3://$BUCKET_NAME/$file" "$TEMP_S3_DIR/$file" || true # Compare the local file with the downloaded file if cmp --silent "$file" "$TEMP_S3_DIR/$file"; then From 7cf269e9ade01c85973e8fc74304bf3bc7af7ce2 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 1 Dec 2023 14:39:11 +0100 Subject: [PATCH 16/56] mirror releases weeks Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 +- scripts/mirror-builtin-actors.sh | 148 +++++++++++--------- 2 files changed, 83 insertions(+), 67 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 673bfc12e..039084331 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -50,7 +50,7 @@ jobs: secret_key: ${{ secrets.R2_SECRET_KEY }} account_id: "2238a825c5aca59233eab1f221f7aefb" - - name: Run mirroring script to Cloudflare + - name: Run script to mirror to Cloudflare working-directory: scripts if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: bash ./mirror-builtin-actors.sh diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index e397615d5..45dfe4610 
100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -1,90 +1,106 @@ #!/bin/bash -# This script automates the process of mirroring the latest releases of FILEcoin's builtin-actors. -# It downloads the latest release assets from GitHub and compares them with the existing ones in an S3 bucket. -# If there are new or updated assets, the script uploads them to the bucket and send alerts to Slack. +# This script mirrors all releases of FILEcoin's builtin-actors, properly versioned. +# It downloads release assets from GitHub, compares them with the existing ones in an S3 bucket, +# uploads new or updated assets, and sends alerts to Slack if there are failures. +# It only processes releases updated within the past week. set -eo pipefail export DEBIAN_FRONTEND=noninteractive -RELEASE_FOLDER="releases/actors" +BASE_FOLDER="$(pwd)/releases/actors" +API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases" +FAILED_LOG="$(pwd)/failed_uploads.log" -# Move to the base folder -mkdir -p "$RELEASE_FOLDER" -cd "$RELEASE_FOLDER" +# Create base directory and log file +mkdir -p "$BASE_FOLDER" +> "$FAILED_LOG" # Clear previous log content -# Set the GitHub API URL for the latest release -API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases/latest" +# Get the date 1 week ago in YYYY-MM-DD format +#ONE_WEEK_AGO=$(date -d '1 week ago' +%F) # For Linux +ONE_WEEK_AGO=$(date -v-1w +%F) # For macOS +echo "one_week_age - $ONE_WEEK_AGO" -# Use curl to fetch the latest release data -ASSETS=$(curl -sS $API_URL | jq -c '.assets[]') +# Fetch all releases and create directories for those updated in the last week +curl -sS $API_URL | jq -c '.[]' | while read -r release; do + TAG_NAME=$(echo "$release" | jq -r '.tag_name') + UPDATE_DATE=$(echo "$release" | jq -r '.updated_at') + echo "updated_at - $UPDATE_DATE" + PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at' | cut -c 1-10) -# Check if assets are available -if 
[ -z "$ASSETS" ]; then - echo "No assets found for the latest release." - exit 1 -fi - -# Download ASSETS from GitHub -echo "$ASSETS" | while read -r asset; do - DOWNLOAD_URL=$(echo "$asset" | jq -r '.browser_download_url') - FILE_NAME=$(echo "$asset" | jq -r '.name') + echo "published_date - $PUBLISHED_DATE" # Debugging line - if [ ! -f "$FILE_NAME" ]; then - echo "Downloading $FILE_NAME..." - wget -q "$DOWNLOAD_URL" -O "$FILE_NAME" + # Check if PUBLISHED_DATE is equal to or more recent than ONE_WEEK_AGO + if [[ "$PUBLISHED_DATE" < "$ONE_WEEK_AGO" ]]; then + mkdir -p "$BASE_FOLDER/$TAG_NAME" fi done - -# Initialize arrays for tracking uploads -declare -a successful_uploads +# Initialize array for tracking failed uploads declare -a failed_uploads -# Function to send Slack alert with summary -send_slack_alert_with_summary() { - local success_list="${successful_uploads[*]}" - local failure_list="${failed_uploads[*]}" - local message="$ENVIROMENT builtin-actors assets upload summary:\nāœ… Successful: $success_list\nšŸ”„ Failed: $failure_list" +# Function to send Slack alert with failed uploads +send_slack_alert_with_failed() { + local failure_count=${#failed_uploads[@]} + local message="šŸšØ FILEcoin Actors Mirror Update:\nšŸ”„ Failed" - curl -X POST -H 'Content-type: application/json' -H "Authorization: Bearer $SLACK_API_TOKEN" \ - --data "{\"channel\":\"$SLACK_CHANNEL\",\"text\":\"${message}\"}" \ - https://slack.com/api/chat.postMessage + # Attach the log file with failed uploads + curl -F file=@"$FAILED_LOG" -F "initial_comment=$message" -F channels="$SLACK_CHANNEL" \ + -H "Authorization: Bearer $SLACK_API_TOKEN" \ + https://slack.com/api/files.upload } -# Loop through all files in the current directory -for file in *; do - if [ -f "$file" ]; then - echo "Checking $file against S3 version..." 
- - # Create a temporary directory for the S3 download - TEMP_S3_DIR=$(mktemp -d) - - # Download the file from S3 to the temporary location - s3cmd get --no-progress "s3://$BUCKET_NAME/$file" "$TEMP_S3_DIR/$file" || true - - # Compare the local file with the downloaded file - if cmp --silent "$file" "$TEMP_S3_DIR/$file"; then - echo "$file is the same in S3, skipping..." - rm -rf "$file" +# Loop through all version directories for downloading assets and S3 upload +while IFS= read -r version_dir; do + TAG_NAME=${version_dir#$BASE_FOLDER/} + VERSION_DIR="$version_dir" + if [ -d "$VERSION_DIR" ]; then + echo "Entering directory: $VERSION_DIR" + + release=$(curl -sS $API_URL | jq -c --arg TAG_NAME "$TAG_NAME" '.[] | select(.tag_name==$TAG_NAME)') + ASSETS=$(echo "$release" | jq -c '.assets[]') + + # Download assets for this release + pushd "$VERSION_DIR" > /dev/null + echo "Processing assets for $TAG_NAME..." + if [ -z "$ASSETS" ]; then + echo "No assets found for $TAG_NAME." else - echo "Local $file is different. Uploading to S3..." - if s3cmd --acl-public put --no-progress "$file" "s3://$BUCKET_NAME/$file"; then - echo "Uploaded $file to s3://$BUCKET_NAME/$file" - successful_uploads+=("$file") - else - echo "Failed to upload $file." - failed_uploads+=("$file") - fi + echo "$ASSETS" | while IFS= read -r asset; do + DOWNLOAD_URL=$(echo "$asset" | jq -r '.browser_download_url') + FILE_NAME=$(echo "$asset" | jq -r '.name') + + echo "Checking asset: $FILE_NAME" + if [ ! -f "$FILE_NAME" ]; then + echo "Downloading $FILE_NAME..." + wget -q "$DOWNLOAD_URL" -O "$FILE_NAME" || echo "Failed to download $FILE_NAME" + fi + + # S3 upload logic for each file + echo "Checking $FILE_NAME against S3 version..." + TEMP_S3_DIR=$(mktemp -d) + s3cmd get --no-progress "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" || true + + if cmp --silent "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME"; then + echo "$FILE_NAME is the same in S3, skipping..." 
+ else + echo "Local $FILE_NAME is different. Uploading to S3..." + if s3cmd --acl-public put "$FILE_NAME" "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME"; then + echo "Uploaded $FILE_NAME to s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" + else + echo "Failed to upload $FILE_NAME. Logging to $FAILED_LOG" + echo "$TAG_NAME/$FILE_NAME" >> "$FAILED_LOG" + failed_uploads+=("$TAG_NAME/$FILE_NAME") + fi + fi + rm -rf "$TEMP_S3_DIR" + done fi - - rm -rf "$TEMP_S3_DIR" + popd > /dev/null fi -done +done < <(find "$BASE_FOLDER" -mindepth 1 -type d) -# Send summary alert at the end only if there were uploads or failures -if [ ${#successful_uploads[@]} -ne 0 ] || [ ${#failed_uploads[@]} -ne 0 ]; then - send_slack_alert_with_summary -else - echo "No new mirroring uploads or failures" +# Send summary alert only if there were failed uploads +if [ ${#failed_uploads[@]} -ne 0 ]; then + send_slack_alert_with_failed fi From 5dd0df4f7b26e25726803d6ef8511a6d7b0f1454 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 1 Dec 2023 15:09:00 +0100 Subject: [PATCH 17/56] fix ci Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 45dfe4610..5c9bfd0b5 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -15,34 +15,31 @@ FAILED_LOG="$(pwd)/failed_uploads.log" # Create base directory and log file mkdir -p "$BASE_FOLDER" -> "$FAILED_LOG" # Clear previous log content # Get the date 1 week ago in YYYY-MM-DD format -#ONE_WEEK_AGO=$(date -d '1 week ago' +%F) # For Linux -ONE_WEEK_AGO=$(date -v-1w +%F) # For macOS -echo "one_week_age - $ONE_WEEK_AGO" +ONE_WEEK_AGO=$(date -d '2 week ago' +%s) -# Fetch all releases and create directories for those updated in the last week +# Fetch all releases and create directories for those published in the last week curl -sS $API_URL | jq -c '.[]' | while read 
-r release; do TAG_NAME=$(echo "$release" | jq -r '.tag_name') - UPDATE_DATE=$(echo "$release" | jq -r '.updated_at') - echo "updated_at - $UPDATE_DATE" - PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at' | cut -c 1-10) + PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at') - echo "published_date - $PUBLISHED_DATE" # Debugging line + # Convert PUBLISHED_DATE to seconds since the epoch for comparison + PUBLISHED_DATE_SEC=$(date -d "$PUBLISHED_DATE" +%s) # For Linux - # Check if PUBLISHED_DATE is equal to or more recent than ONE_WEEK_AGO - if [[ "$PUBLISHED_DATE" < "$ONE_WEEK_AGO" ]]; then + # Check if PUBLISHED_DATE_SEC is equal to or more recent than ONE_WEEK_AGO + if [[ "$PUBLISHED_DATE_SEC" -ge "$TWO_WEEK_AGO" ]]; then mkdir -p "$BASE_FOLDER/$TAG_NAME" fi done + # Initialize array for tracking failed uploads declare -a failed_uploads # Function to send Slack alert with failed uploads send_slack_alert_with_failed() { local failure_count=${#failed_uploads[@]} - local message="šŸšØ FILEcoin Actors Mirror Update:\nšŸ”„ Failed" + local message="šŸšØ FILEcoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" # Attach the log file with failed uploads curl -F file=@"$FAILED_LOG" -F "initial_comment=$message" -F channels="$SLACK_CHANNEL" \ @@ -52,7 +49,7 @@ send_slack_alert_with_failed() { # Loop through all version directories for downloading assets and S3 upload while IFS= read -r version_dir; do - TAG_NAME=${version_dir#$BASE_FOLDER/} + TAG_NAME=${version_dir#"$BASE_FOLDER"/} VERSION_DIR="$version_dir" if [ -d "$VERSION_DIR" ]; then echo "Entering directory: $VERSION_DIR" From f4427900eef1ed2b393f34df53e3040349a36ddd Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 1 Dec 2023 15:13:00 +0100 Subject: [PATCH 18/56] fix ci Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 
5c9bfd0b5..2739dd6e4 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -1,6 +1,6 @@ #!/bin/bash -# This script mirrors all releases of FILEcoin's builtin-actors, properly versioned. +# This script mirrors all releases of Filecoin's builtin-actors, properly versioned. # It downloads release assets from GitHub, compares them with the existing ones in an S3 bucket, # uploads new or updated assets, and sends alerts to Slack if there are failures. # It only processes releases updated within the past week. @@ -16,8 +16,8 @@ FAILED_LOG="$(pwd)/failed_uploads.log" # Create base directory and log file mkdir -p "$BASE_FOLDER" -# Get the date 1 week ago in YYYY-MM-DD format -ONE_WEEK_AGO=$(date -d '2 week ago' +%s) +# Get the date 2 week ago in YYYY-MM-DD format +TWO_WEEK_AGO=$(date -d '2 week ago' +%s) # Fetch all releases and create directories for those published in the last week curl -sS $API_URL | jq -c '.[]' | while read -r release; do @@ -39,7 +39,7 @@ declare -a failed_uploads # Function to send Slack alert with failed uploads send_slack_alert_with_failed() { local failure_count=${#failed_uploads[@]} - local message="šŸšØ FILEcoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" + local message="šŸšØ Fileoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" # Attach the log file with failed uploads curl -F file=@"$FAILED_LOG" -F "initial_comment=$message" -F channels="$SLACK_CHANNEL" \ From b0f9ba6f4a7e655f6a40a0b7ce119107b69e63c4 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 1 Dec 2023 15:40:15 +0100 Subject: [PATCH 19/56] add more comments and remove downloaded actors Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 2739dd6e4..fe563d7c2 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -3,7 +3,7 
@@ # This script mirrors all releases of Filecoin's builtin-actors, properly versioned. # It downloads release assets from GitHub, compares them with the existing ones in an S3 bucket, # uploads new or updated assets, and sends alerts to Slack if there are failures. -# It only processes releases updated within the past week. +# It only processes releases updated within the past weeks. set -eo pipefail @@ -80,14 +80,17 @@ while IFS= read -r version_dir; do if cmp --silent "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME"; then echo "$FILE_NAME is the same in S3, skipping..." + rm "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" else echo "Local $FILE_NAME is different. Uploading to S3..." if s3cmd --acl-public put "$FILE_NAME" "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME"; then echo "Uploaded $FILE_NAME to s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" + rm "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" else echo "Failed to upload $FILE_NAME. Logging to $FAILED_LOG" echo "$TAG_NAME/$FILE_NAME" >> "$FAILED_LOG" failed_uploads+=("$TAG_NAME/$FILE_NAME") + rm "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" fi fi rm -rf "$TEMP_S3_DIR" @@ -100,4 +103,6 @@ done < <(find "$BASE_FOLDER" -mindepth 1 -type d) # Send summary alert only if there were failed uploads if [ ${#failed_uploads[@]} -ne 0 ]; then send_slack_alert_with_failed +else + echo "No new mirroring failures" fi From 7cb2ac6fbacb76349c94b55e01cf66c0b9133cd2 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Sat, 2 Dec 2023 23:04:14 +0100 Subject: [PATCH 20/56] add more comments Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index fe563d7c2..7862d1648 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -25,9 +25,9 @@ curl -sS $API_URL | jq -c '.[]' | while read -r release; do PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at') # Convert PUBLISHED_DATE to seconds 
since the epoch for comparison - PUBLISHED_DATE_SEC=$(date -d "$PUBLISHED_DATE" +%s) # For Linux + PUBLISHED_DATE_SEC=$(date -d "$PUBLISHED_DATE" +%s) - # Check if PUBLISHED_DATE_SEC is equal to or more recent than ONE_WEEK_AGO + # Check if PUBLISHED_DATE_SEC is equal to or more recent than TWO_WEEK_AGO if [[ "$PUBLISHED_DATE_SEC" -ge "$TWO_WEEK_AGO" ]]; then mkdir -p "$BASE_FOLDER/$TAG_NAME" fi @@ -41,7 +41,6 @@ send_slack_alert_with_failed() { local failure_count=${#failed_uploads[@]} local message="šŸšØ Fileoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" - # Attach the log file with failed uploads curl -F file=@"$FAILED_LOG" -F "initial_comment=$message" -F channels="$SLACK_CHANNEL" \ -H "Authorization: Bearer $SLACK_API_TOKEN" \ https://slack.com/api/files.upload From 9eb7bc4e4b5d1369ea0307f4358ad3d9f7d514fe Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Sun, 3 Dec 2023 08:57:02 +0100 Subject: [PATCH 21/56] nits: improve comments Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 7862d1648..80541bd40 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -1,9 +1,11 @@ #!/bin/bash -# This script mirrors all releases of Filecoin's builtin-actors, properly versioned. -# It downloads release assets from GitHub, compares them with the existing ones in an S3 bucket, -# uploads new or updated assets, and sends alerts to Slack if there are failures. -# It only processes releases updated within the past weeks. +# This script mirrors all releases of Filecoin's builtin-actors that have been updated in the past two weeks. +# It performs the following operations: +# - Downloads release assets from GitHub. +# - Compares these assets with the existing ones in an S3 bucket. +# - Uploads new or updated assets to the S3 bucket. 
+# - Sends an alert to Slack if any uploads fail. set -eo pipefail @@ -13,13 +15,13 @@ BASE_FOLDER="$(pwd)/releases/actors" API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases" FAILED_LOG="$(pwd)/failed_uploads.log" -# Create base directory and log file + : 'Create base directory and log file' mkdir -p "$BASE_FOLDER" -# Get the date 2 week ago in YYYY-MM-DD format +: 'Calculate the date two weeks ago in Unix timestamp format' TWO_WEEK_AGO=$(date -d '2 week ago' +%s) -# Fetch all releases and create directories for those published in the last week +: 'Fetch all releases and create directories for those published in the last week' curl -sS $API_URL | jq -c '.[]' | while read -r release; do TAG_NAME=$(echo "$release" | jq -r '.tag_name') PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at') @@ -33,10 +35,10 @@ curl -sS $API_URL | jq -c '.[]' | while read -r release; do fi done -# Initialize array for tracking failed uploads +: 'Initialize array for tracking failed uploads' declare -a failed_uploads -# Function to send Slack alert with failed uploads +: 'Function to send Slack alert with failed uploads' send_slack_alert_with_failed() { local failure_count=${#failed_uploads[@]} local message="šŸšØ Fileoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" @@ -46,7 +48,7 @@ send_slack_alert_with_failed() { https://slack.com/api/files.upload } -# Loop through all version directories for downloading assets and S3 upload +: 'Loop through each version directory to process and upload assets' while IFS= read -r version_dir; do TAG_NAME=${version_dir#"$BASE_FOLDER"/} VERSION_DIR="$version_dir" @@ -56,7 +58,7 @@ while IFS= read -r version_dir; do release=$(curl -sS $API_URL | jq -c --arg TAG_NAME "$TAG_NAME" '.[] | select(.tag_name==$TAG_NAME)') ASSETS=$(echo "$release" | jq -c '.assets[]') - # Download assets for this release + : 'Download assets for this release' pushd "$VERSION_DIR" > /dev/null echo "Processing assets for $TAG_NAME..." 
if [ -z "$ASSETS" ]; then @@ -72,7 +74,7 @@ while IFS= read -r version_dir; do wget -q "$DOWNLOAD_URL" -O "$FILE_NAME" || echo "Failed to download $FILE_NAME" fi - # S3 upload logic for each file + : 'Compare the downloaded file with the one in S3; upload if different' echo "Checking $FILE_NAME against S3 version..." TEMP_S3_DIR=$(mktemp -d) s3cmd get --no-progress "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" || true @@ -99,7 +101,7 @@ while IFS= read -r version_dir; do fi done < <(find "$BASE_FOLDER" -mindepth 1 -type d) -# Send summary alert only if there were failed uploads +: 'Send summary alert only if there were failed uploads' if [ ${#failed_uploads[@]} -ne 0 ]; then send_slack_alert_with_failed else From 0270e2a2547ecc70ed7d727cd1712ef80de2e226 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 2 Jan 2024 12:47:15 +0100 Subject: [PATCH 22/56] nits: filter downloaded releases Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 88 +++++++++++++++++--------------- 1 file changed, 48 insertions(+), 40 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 80541bd40..269600d93 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -1,6 +1,7 @@ #!/bin/bash -# This script mirrors all releases of Filecoin's builtin-actors that have been updated in the past two weeks. +# This script mirrors all releases of Filecoin's builtin-actors that have been updated in the past two years, +# It respects GitHub API rate limits and paginates requests. # It performs the following operations: # - Downloads release assets from GitHub. # - Compares these assets with the existing ones in an S3 bucket. 
@@ -13,41 +14,48 @@ export DEBIAN_FRONTEND=noninteractive BASE_FOLDER="$(pwd)/releases/actors" API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases" +LIST_FILE="$(pwd)/release_list_for_review.txt" FAILED_LOG="$(pwd)/failed_uploads.log" +TWO_YEARS_AGO=$(date -d '2 years ago' +%s) - : 'Create base directory and log file' mkdir -p "$BASE_FOLDER" +true > "$LIST_FILE" -: 'Calculate the date two weeks ago in Unix timestamp format' -TWO_WEEK_AGO=$(date -d '2 week ago' +%s) - -: 'Fetch all releases and create directories for those published in the last week' -curl -sS $API_URL | jq -c '.[]' | while read -r release; do - TAG_NAME=$(echo "$release" | jq -r '.tag_name') - PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at') +# Function to extract the next page URL from GitHub API response headers for pagination. +get_next_page_url() { + local headers="$1" + echo "$headers" | grep -oP '<\K([^>]+)(?=>; rel="next")' || echo "" +} - # Convert PUBLISHED_DATE to seconds since the epoch for comparison - PUBLISHED_DATE_SEC=$(date -d "$PUBLISHED_DATE" +%s) +# Function to fetch and process releases +fetch_and_process_releases() { + local api_url="$1" + local page_url="$api_url" + + while [[ -n $page_url ]]; do + response=$(curl -s -I "$page_url") + body=$(curl -s "$page_url") + next_page_url=$(get_next_page_url "$response") + + echo "$body" | jq -c '.[]' | while read -r release; do + TAG_NAME=$(echo "$release" | jq -r '.tag_name') + PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at') + PUBLISHED_DATE_SEC=$(date -d "$PUBLISHED_DATE" +%s) + + if echo "$TAG_NAME" | grep -qE '^v[0-9]+\.[0-9]+\.[0-9]+.*$' && [[ "$PUBLISHED_DATE_SEC" -ge "$TWO_YEARS_AGO" ]]; then + mkdir -p "$BASE_FOLDER/$TAG_NAME" + echo "$TAG_NAME" >> "$LIST_FILE" + fi + done + + page_url="$next_page_url" + done +} - # Check if PUBLISHED_DATE_SEC is equal to or more recent than TWO_WEEK_AGO - if [[ "$PUBLISHED_DATE_SEC" -ge "$TWO_WEEK_AGO" ]]; then - mkdir -p "$BASE_FOLDER/$TAG_NAME" - 
fi -done +fetch_and_process_releases "$API_URL" -: 'Initialize array for tracking failed uploads' declare -a failed_uploads -: 'Function to send Slack alert with failed uploads' -send_slack_alert_with_failed() { - local failure_count=${#failed_uploads[@]} - local message="šŸšØ Fileoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" - - curl -F file=@"$FAILED_LOG" -F "initial_comment=$message" -F channels="$SLACK_CHANNEL" \ - -H "Authorization: Bearer $SLACK_API_TOKEN" \ - https://slack.com/api/files.upload -} - : 'Loop through each version directory to process and upload assets' while IFS= read -r version_dir; do TAG_NAME=${version_dir#"$BASE_FOLDER"/} @@ -55,15 +63,16 @@ while IFS= read -r version_dir; do if [ -d "$VERSION_DIR" ]; then echo "Entering directory: $VERSION_DIR" - release=$(curl -sS $API_URL | jq -c --arg TAG_NAME "$TAG_NAME" '.[] | select(.tag_name==$TAG_NAME)') - ASSETS=$(echo "$release" | jq -c '.assets[]') + tag_url="$API_URL/tags/$TAG_NAME" + release=$(curl -sS "$tag_url") + + # Check if the assets array is not null + if [[ $(echo "$release" | jq '.assets') != "null" ]]; then + ASSETS=$(echo "$release" | jq -c '.assets[]') + + pushd "$VERSION_DIR" > /dev/null + echo "Processing assets for $TAG_NAME..." - : 'Download assets for this release' - pushd "$VERSION_DIR" > /dev/null - echo "Processing assets for $TAG_NAME..." - if [ -z "$ASSETS" ]; then - echo "No assets found for $TAG_NAME." - else echo "$ASSETS" | while IFS= read -r asset; do DOWNLOAD_URL=$(echo "$asset" | jq -r '.browser_download_url') FILE_NAME=$(echo "$asset" | jq -r '.name') @@ -74,30 +83,29 @@ while IFS= read -r version_dir; do wget -q "$DOWNLOAD_URL" -O "$FILE_NAME" || echo "Failed to download $FILE_NAME" fi - : 'Compare the downloaded file with the one in S3; upload if different' echo "Checking $FILE_NAME against S3 version..." 
TEMP_S3_DIR=$(mktemp -d) s3cmd get --no-progress "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" || true if cmp --silent "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME"; then echo "$FILE_NAME is the same in S3, skipping..." - rm "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" else echo "Local $FILE_NAME is different. Uploading to S3..." if s3cmd --acl-public put "$FILE_NAME" "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME"; then echo "Uploaded $FILE_NAME to s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" - rm "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" else echo "Failed to upload $FILE_NAME. Logging to $FAILED_LOG" echo "$TAG_NAME/$FILE_NAME" >> "$FAILED_LOG" failed_uploads+=("$TAG_NAME/$FILE_NAME") - rm "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" fi fi rm -rf "$TEMP_S3_DIR" + rm -f "$FILE_NAME" done + popd > /dev/null + else + echo "No assets found for $TAG_NAME." fi - popd > /dev/null fi done < <(find "$BASE_FOLDER" -mindepth 1 -type d) From 01b6612987a249c35234c4d354e9c40f92492543 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 2 Jan 2024 16:59:01 +0100 Subject: [PATCH 23/56] nits: remove proof lost files Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 269600d93..2dbababeb 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -14,12 +14,10 @@ export DEBIAN_FRONTEND=noninteractive BASE_FOLDER="$(pwd)/releases/actors" API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases" -LIST_FILE="$(pwd)/release_list_for_review.txt" FAILED_LOG="$(pwd)/failed_uploads.log" -TWO_YEARS_AGO=$(date -d '2 years ago' +%s) +TWO_YEARS_AGO=$(date -d '3 years ago' +%s) mkdir -p "$BASE_FOLDER" -true > "$LIST_FILE" # Function to extract the next page URL from GitHub API response headers for pagination. 
get_next_page_url() { @@ -44,7 +42,6 @@ fetch_and_process_releases() { if echo "$TAG_NAME" | grep -qE '^v[0-9]+\.[0-9]+\.[0-9]+.*$' && [[ "$PUBLISHED_DATE_SEC" -ge "$TWO_YEARS_AGO" ]]; then mkdir -p "$BASE_FOLDER/$TAG_NAME" - echo "$TAG_NAME" >> "$LIST_FILE" fi done From d7c5765188d59d09907edf68ea0f5e712340b4d4 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 5 Jan 2024 20:49:07 +0100 Subject: [PATCH 24/56] fix: make review adjustment Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 6 +-- scripts/mirror-builtin-actors.sh | 60 ++++++++++++--------- 2 files changed, 37 insertions(+), 29 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 039084331..904a0181b 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -31,8 +31,7 @@ jobs: run: bash ./mirror-builtin-actors.sh env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL: "#forest-dump" - ENVIROMENT: digitalocean + SLACK_CHANNEL: "#forest-notifications" BUCKET_NAME: filecoin-builtin-actors mirror-releases-cf: @@ -56,6 +55,5 @@ jobs: run: bash ./mirror-builtin-actors.sh env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL: "#forest-dump" - ENVIROMENT: cloudflare + SLACK_CHANNEL: "#forest-notifications" BUCKET_NAME: filecoin-builtin-actors diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index 2dbababeb..e7123d9ec 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -1,6 +1,6 @@ #!/bin/bash -# This script mirrors all releases of Filecoin's builtin-actors that have been updated in the past two years, +# This script mirrors all releases of Filecoin's builtin-actors that have been updated in the past three years, # It respects GitHub API rate limits and paginates requests. # It performs the following operations: # - Downloads release assets from GitHub. 
@@ -8,40 +8,50 @@ # - Uploads new or updated assets to the S3 bucket. # - Sends an alert to Slack if any uploads fail. -set -eo pipefail +set -euo pipefail export DEBIAN_FRONTEND=noninteractive BASE_FOLDER="$(pwd)/releases/actors" API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases" FAILED_LOG="$(pwd)/failed_uploads.log" -TWO_YEARS_AGO=$(date -d '3 years ago' +%s) +THREE_YEARS_AGO=$(date --date='3 years ago' +%s) -mkdir -p "$BASE_FOLDER" +mkdir --parents "$BASE_FOLDER" # Function to extract the next page URL from GitHub API response headers for pagination. get_next_page_url() { local headers="$1" - echo "$headers" | grep -oP '<\K([^>]+)(?=>; rel="next")' || echo "" + echo "$headers" | grep --only-matching --perl-regexp '<\K([^>]+)(?=>; rel="next")' || echo "" +} + +# Function to send Slack alert with failed uploads +send_slack_alert_with_failed() { + local failure_count=${#failed_uploads[@]} + local message="šŸšØ Filecoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" + + # Attach the log file with failed uploads + curl --form file=@"$FAILED_LOG" --form "initial_comment=$message" --form channels="$SLACK_CHANNEL" \ + --header "Authorization: Bearer $SLACK_API_TOKEN" \ + https://slack.com/api/files.upload } # Function to fetch and process releases fetch_and_process_releases() { - local api_url="$1" - local page_url="$api_url" + local page_url="$API_URL" while [[ -n $page_url ]]; do - response=$(curl -s -I "$page_url") - body=$(curl -s "$page_url") + response=$(curl --silent --head "$page_url") + body=$(curl --silent "$page_url") next_page_url=$(get_next_page_url "$response") - echo "$body" | jq -c '.[]' | while read -r release; do - TAG_NAME=$(echo "$release" | jq -r '.tag_name') - PUBLISHED_DATE=$(echo "$release" | jq -r '.published_at') - PUBLISHED_DATE_SEC=$(date -d "$PUBLISHED_DATE" +%s) + echo "$body" | jq --compact-output '.[]' | while read -r release; do + TAG_NAME=$(echo "$release" | jq --raw-output '.tag_name') + 
PUBLISHED_DATE=$(echo "$release" | jq --raw-output '.published_at') + PUBLISHED_DATE_SEC=$(date --date="$PUBLISHED_DATE" +%s) - if echo "$TAG_NAME" | grep -qE '^v[0-9]+\.[0-9]+\.[0-9]+.*$' && [[ "$PUBLISHED_DATE_SEC" -ge "$TWO_YEARS_AGO" ]]; then - mkdir -p "$BASE_FOLDER/$TAG_NAME" + if echo "$TAG_NAME" | grep --extended-regexp '^v[0-9]+\.[0-9]+\.[0-9]+.*$' && [[ "$PUBLISHED_DATE_SEC" -ge "$THREE_YEARS_AGO" ]]; then + mkdir --parents "$BASE_FOLDER/$TAG_NAME" fi done @@ -49,7 +59,7 @@ fetch_and_process_releases() { done } -fetch_and_process_releases "$API_URL" +fetch_and_process_releases declare -a failed_uploads @@ -61,27 +71,27 @@ while IFS= read -r version_dir; do echo "Entering directory: $VERSION_DIR" tag_url="$API_URL/tags/$TAG_NAME" - release=$(curl -sS "$tag_url") + release=$(curl --silent "$tag_url") # Check if the assets array is not null if [[ $(echo "$release" | jq '.assets') != "null" ]]; then - ASSETS=$(echo "$release" | jq -c '.assets[]') + ASSETS=$(echo "$release" | jq --compact-output '.assets[]') pushd "$VERSION_DIR" > /dev/null echo "Processing assets for $TAG_NAME..." echo "$ASSETS" | while IFS= read -r asset; do - DOWNLOAD_URL=$(echo "$asset" | jq -r '.browser_download_url') - FILE_NAME=$(echo "$asset" | jq -r '.name') + DOWNLOAD_URL=$(echo "$asset" | jq --raw-output '.browser_download_url') + FILE_NAME=$(echo "$asset" | jq --raw-output '.name') echo "Checking asset: $FILE_NAME" if [ ! -f "$FILE_NAME" ]; then echo "Downloading $FILE_NAME..." - wget -q "$DOWNLOAD_URL" -O "$FILE_NAME" || echo "Failed to download $FILE_NAME" + curl --silent --output "$FILE_NAME" "$DOWNLOAD_URL" || echo "Failed to download $FILE_NAME" fi echo "Checking $FILE_NAME against S3 version..." 
- TEMP_S3_DIR=$(mktemp -d) + TEMP_S3_DIR=$(mktemp --directory) s3cmd get --no-progress "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" || true if cmp --silent "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME"; then @@ -96,15 +106,15 @@ while IFS= read -r version_dir; do failed_uploads+=("$TAG_NAME/$FILE_NAME") fi fi - rm -rf "$TEMP_S3_DIR" - rm -f "$FILE_NAME" + rm --recursive --force "$TEMP_S3_DIR" + rm --force "$FILE_NAME" done popd > /dev/null else echo "No assets found for $TAG_NAME." fi fi -done < <(find "$BASE_FOLDER" -mindepth 1 -type d) +done < <(find "$BASE_FOLDER" --mindepth 1 --type d) : 'Send summary alert only if there were failed uploads' if [ ${#failed_uploads[@]} -ne 0 ]; then From d4e598e013f1a960a99cf42cd6a7d023dd3abce5 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Mon, 8 Jan 2024 13:08:32 +0100 Subject: [PATCH 25/56] nits: more review changes Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index e7123d9ec..bd2f287e5 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -46,7 +46,7 @@ fetch_and_process_releases() { next_page_url=$(get_next_page_url "$response") echo "$body" | jq --compact-output '.[]' | while read -r release; do - TAG_NAME=$(echo "$release" | jq --raw-output '.tag_name') + TAG_NAME=$(echo "$release" | jq --raw-output '.tag_name') || echo "Error: $release, could not get tag name" PUBLISHED_DATE=$(echo "$release" | jq --raw-output '.published_at') PUBLISHED_DATE_SEC=$(date --date="$PUBLISHED_DATE" +%s) @@ -62,6 +62,7 @@ fetch_and_process_releases() { fetch_and_process_releases declare -a failed_uploads +failed_uploads=() : 'Loop through each version directory to process and upload assets' while IFS= read -r version_dir; do @@ -114,7 +115,7 @@ while IFS= read -r version_dir; do echo "No assets found for $TAG_NAME." 
fi fi -done < <(find "$BASE_FOLDER" --mindepth 1 --type d) +done < <(find "$BASE_FOLDER" -mindepth 1 -type d) : 'Send summary alert only if there were failed uploads' if [ ${#failed_uploads[@]} -ne 0 ]; then From 718a724ffb9061bbab6392f97f036b9df24490dc Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 9 Jan 2024 08:47:41 +0100 Subject: [PATCH 26/56] nits: fix curl zero byte download Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh index bd2f287e5..170997678 100755 --- a/scripts/mirror-builtin-actors.sh +++ b/scripts/mirror-builtin-actors.sh @@ -88,7 +88,8 @@ while IFS= read -r version_dir; do echo "Checking asset: $FILE_NAME" if [ ! -f "$FILE_NAME" ]; then echo "Downloading $FILE_NAME..." - curl --silent --output "$FILE_NAME" "$DOWNLOAD_URL" || echo "Failed to download $FILE_NAME" + curl --location --silent --show-error --output "$FILE_NAME" "$DOWNLOAD_URL" || echo "Failed to download $FILE_NAME" + fi echo "Checking $FILE_NAME against S3 version..." 
From ed891bbff01f695c221effb689c40df7437086d2 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Wed, 10 Jan 2024 14:24:31 +0100 Subject: [PATCH 27/56] nits: use secret variable Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 904a0181b..e2140dbfe 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -47,7 +47,7 @@ jobs: region: auto access_key: ${{ secrets.R2_ACCESS_KEY }} secret_key: ${{ secrets.R2_SECRET_KEY }} - account_id: "2238a825c5aca59233eab1f221f7aefb" + account_id: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - name: Run script to mirror to Cloudflare working-directory: scripts From 2940dd2c955b873a163c0985cd7eaaafef1925fa Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 12:32:01 +0100 Subject: [PATCH 28/56] chore: rewrite in python Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 88 +-- .../mirror-actors/mirror_actors/__main__.py | 61 ++ scripts/mirror-actors/poetry.lock | 560 ++++++++++++++++++ scripts/mirror-actors/pyproject.toml | 18 + scripts/mirror-builtin-actors.sh | 126 ---- 5 files changed, 693 insertions(+), 160 deletions(-) create mode 100644 scripts/mirror-actors/mirror_actors/__main__.py create mode 100644 scripts/mirror-actors/poetry.lock create mode 100644 scripts/mirror-actors/pyproject.toml delete mode 100755 scripts/mirror-builtin-actors.sh diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index e2140dbfe..f03a2662a 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -4,10 +4,10 @@ on: - cron: '0 * * * *' # Runs every hour pull_request: paths: - - 'scripts/mirror-builtin-actors.sh' + - 'scripts/mirror-actors/**' push: paths: - - 
'scripts/mirror-builtin-actors.sh' + - 'scripts/mirror-actors/**' workflow_dispatch: jobs: @@ -15,45 +15,65 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - - name: Set up S3cmd cli tool for DigitalOcean - uses: s3-actions/s3cmd@v1.5.0 + - name: Install Python + uses: actions/setup-python@v5 with: - provider: digitalocean - region: fra1 - access_key: ${{ secrets.AWS_ACCESS_KEY_ID }} - secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + python-version: '3.12' - - name: Run mirroring script to DigitalOcean - working-directory: scripts - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: bash ./mirror-builtin-actors.sh - env: - SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - SLACK_CHANNEL: "#forest-notifications" - BUCKET_NAME: filecoin-builtin-actors - - mirror-releases-cf: - runs-on: ubuntu-latest - steps: - - name: Checkout source code - uses: actions/checkout@v2 + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true - - name: Set up S3cmd cli tool for Cloudflare - uses: s3-actions/s3cmd@v1.5.0 + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 with: - provider: cloudflare - region: auto - access_key: ${{ secrets.R2_ACCESS_KEY }} - secret_key: ${{ secrets.R2_SECRET_KEY }} - account_id: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - - - name: Run script to mirror to Cloudflare - working-directory: scripts + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + - name: Run tests + working-directory: scripts/mirror-actors if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || 
github.event_name == 'workflow_dispatch') - run: bash ./mirror-builtin-actors.sh + run: | + source .venv/bin/activate + python3 mirror_actors/ env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} SLACK_CHANNEL: "#forest-notifications" BUCKET_NAME: filecoin-builtin-actors + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + REGION_NAME: fra1 + + # mirror-releases-cf: + # runs-on: ubuntu-latest + # steps: + # - name: Checkout source code + # uses: actions/checkout@v4 + + # - name: Set up S3cmd cli tool for Cloudflare + # uses: s3-actions/s3cmd@v1.5.0 + # with: + # provider: cloudflare + # region: auto + # access_key: ${{ secrets.R2_ACCESS_KEY }} + # secret_key: ${{ secrets.R2_SECRET_KEY }} + # account_id: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + + # - name: Run script to mirror to Cloudflare + # working-directory: scripts + # if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + # run: bash ./mirror-builtin-actors.sh + # env: + # SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} + # SLACK_CHANNEL: "#forest-notifications" + # BUCKET_NAME: filecoin-builtin-actors diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py new file mode 100644 index 000000000..c4cf26dde --- /dev/null +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -0,0 +1,61 @@ +import os +import re +import requests +from datetime import datetime +from dateutil.relativedelta import relativedelta +import boto3 +from slack_sdk.web import WebClient +from github import Github + +# Define environment variables +SLACK_TOKEN = os.environ["SLACK_TOKEN"] +SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] +BUCKET_NAME = os.environ["BUCKET_NAME"] + +GITHUB_REPO = "filecoin-project/builtin-actors" +release_pattern = r'^v\d+\.\d+\.\d+.*$' + +# Initialize clients +slack = WebClient(token=SLACK_TOKEN) +github = Github() +s3 = boto3.client("s3", + 
endpoint_url='https://fra1.digitaloceanspaces.com') + +# Calculate the cutoff date (3 years ago from current date) +three_years_ago = datetime.now() - relativedelta(years=3) + + +def send_slack_alert(message): + slack.chat_postMessage( + channel=SLACK_CHANNEL, + text=message + ).validate() + +# Process GitHub releases +try: + releases = github.get_repo(GITHUB_REPO).get_releases() + # Fetch already mirrored objects from S3 + s3_response = s3.list_objects(Bucket=BUCKET_NAME) + already_mirrored = set() + if 'Contents' in s3_response: + already_mirrored = set(obj["Key"] for obj in s3_response["Contents"]) + + for release in releases: + tag_name = release.tag_name + published_at = release.published_at.replace(tzinfo=None) # Removing timezone info for comparison + + # Skip the release if it's older than 3 years + if published_at < three_years_ago: + continue + + if re.match(release_pattern, tag_name): + for asset in release.get_assets(): + key = f"{tag_name}/{asset.name}" + if key not in already_mirrored: + response = requests.get(asset.browser_download_url) + response.raise_for_status() + s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=response.content) + +except Exception as e: + send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") + raise diff --git a/scripts/mirror-actors/poetry.lock b/scripts/mirror-actors/poetry.lock new file mode 100644 index 000000000..5ba142a87 --- /dev/null +++ b/scripts/mirror-actors/poetry.lock @@ -0,0 +1,560 @@
+ +[[package]] +name = "boto3" +version = "1.34.22" +description = "The AWS SDK for Python" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "boto3-1.34.22-py3-none-any.whl", hash = "sha256:5909cd1393143576265c692e908a9ae495492c04a0ffd4bae8578adc2e44729e"}, + {file = "boto3-1.34.22.tar.gz", hash = "sha256:a98c0b86f6044ff8314cc2361e1ef574d674318313ab5606ccb4a6651c7a3f8c"}, +] + +[package.dependencies] +botocore = ">=1.34.22,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.22" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "botocore-1.34.22-py3-none-any.whl", hash = "sha256:e5f7775975b9213507fbcf846a96b7a2aec2a44fc12a44585197b014a4ab0889"}, + {file = "botocore-1.34.22.tar.gz", hash = "sha256:c47ba4286c576150d1b6ca6df69a87b5deff3d23bd84da8bcf8431ebac3c40ba"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.19.19)"] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = 
"sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "cryptography" +version = "41.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", 
"pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pygithub" 
+version = "2.1.1" +description = "Use the full Github API v3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyGithub-2.1.1-py3-none-any.whl", hash = "sha256:4b528d5d6f35e991ea5fd3f942f58748f24938805cb7fcf24486546637917337"}, + {file = "PyGithub-2.1.1.tar.gz", hash = "sha256:ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c"}, +] + +[package.dependencies] +Deprecated = "*" +pyjwt = {version = ">=2.4.0", extras = ["crypto"]} +pynacl = ">=1.4.0" +python-dateutil = "*" +requests = ">=2.14.0" +typing-extensions = ">=4.0.0" +urllib3 = ">=1.26.0" + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + 
{file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "s3transfer" +version = "0.10.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, + {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "slack-sdk" +version = "3.26.2" +description = "The Slack API Platform SDK for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "slack_sdk-3.26.2-py2.py3-none-any.whl", hash = "sha256:a10e8ee69ca17d274989d0c2bbecb875f19898da3052d8d57de0898a00b1ab52"}, + {file = "slack_sdk-3.26.2.tar.gz", hash = "sha256:bcdac5e688fa50e9357ecd00b803b6a8bad766aa614d35d8dc0636f40adc48bf"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" 
+description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "3580c003f3e353816bc928849f63736ceec61b7e958772640536de8f6d94d1f3" diff --git a/scripts/mirror-actors/pyproject.toml b/scripts/mirror-actors/pyproject.toml new file mode 100644 index 000000000..e0643c9c7 --- /dev/null +++ b/scripts/mirror-actors/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +name = "mirror-actors" +version = "0.1.0" +description = "" +authors = ["samuelarogbonlo "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +requests = "^2.31.0" +slack-sdk = "^3.26.2" +boto3 = "^1.34.22" +pygithub = "^2.1.1" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/scripts/mirror-builtin-actors.sh b/scripts/mirror-builtin-actors.sh deleted file mode 100755 index 170997678..000000000 --- a/scripts/mirror-builtin-actors.sh +++ /dev/null @@ -1,126 +0,0 @@ -#!/bin/bash - -# This script mirrors all releases of Filecoin's 
builtin-actors that have been updated in the past three years, -# It respects GitHub API rate limits and paginates requests. -# It performs the following operations: -# - Downloads release assets from GitHub. -# - Compares these assets with the existing ones in an S3 bucket. -# - Uploads new or updated assets to the S3 bucket. -# - Sends an alert to Slack if any uploads fail. - -set -euo pipefail - -export DEBIAN_FRONTEND=noninteractive - -BASE_FOLDER="$(pwd)/releases/actors" -API_URL="https://api.github.com/repos/filecoin-project/builtin-actors/releases" -FAILED_LOG="$(pwd)/failed_uploads.log" -THREE_YEARS_AGO=$(date --date='3 years ago' +%s) - -mkdir --parents "$BASE_FOLDER" - -# Function to extract the next page URL from GitHub API response headers for pagination. -get_next_page_url() { - local headers="$1" - echo "$headers" | grep --only-matching --perl-regexp '<\K([^>]+)(?=>; rel="next")' || echo "" -} - -# Function to send Slack alert with failed uploads -send_slack_alert_with_failed() { - local failure_count=${#failed_uploads[@]} - local message="šŸšØ Filecoin Actors Mirror Update:\nšŸ”„ Failed Uploads: $failure_count" - - # Attach the log file with failed uploads - curl --form file=@"$FAILED_LOG" --form "initial_comment=$message" --form channels="$SLACK_CHANNEL" \ - --header "Authorization: Bearer $SLACK_API_TOKEN" \ - https://slack.com/api/files.upload -} - -# Function to fetch and process releases -fetch_and_process_releases() { - local page_url="$API_URL" - - while [[ -n $page_url ]]; do - response=$(curl --silent --head "$page_url") - body=$(curl --silent "$page_url") - next_page_url=$(get_next_page_url "$response") - - echo "$body" | jq --compact-output '.[]' | while read -r release; do - TAG_NAME=$(echo "$release" | jq --raw-output '.tag_name') || echo "Error: $release, could not get tag name" - PUBLISHED_DATE=$(echo "$release" | jq --raw-output '.published_at') - PUBLISHED_DATE_SEC=$(date --date="$PUBLISHED_DATE" +%s) - - if echo "$TAG_NAME" | grep 
--extended-regexp '^v[0-9]+\.[0-9]+\.[0-9]+.*$' && [[ "$PUBLISHED_DATE_SEC" -ge "$THREE_YEARS_AGO" ]]; then - mkdir --parents "$BASE_FOLDER/$TAG_NAME" - fi - done - - page_url="$next_page_url" - done -} - -fetch_and_process_releases - -declare -a failed_uploads -failed_uploads=() - -: 'Loop through each version directory to process and upload assets' -while IFS= read -r version_dir; do - TAG_NAME=${version_dir#"$BASE_FOLDER"/} - VERSION_DIR="$version_dir" - if [ -d "$VERSION_DIR" ]; then - echo "Entering directory: $VERSION_DIR" - - tag_url="$API_URL/tags/$TAG_NAME" - release=$(curl --silent "$tag_url") - - # Check if the assets array is not null - if [[ $(echo "$release" | jq '.assets') != "null" ]]; then - ASSETS=$(echo "$release" | jq --compact-output '.assets[]') - - pushd "$VERSION_DIR" > /dev/null - echo "Processing assets for $TAG_NAME..." - - echo "$ASSETS" | while IFS= read -r asset; do - DOWNLOAD_URL=$(echo "$asset" | jq --raw-output '.browser_download_url') - FILE_NAME=$(echo "$asset" | jq --raw-output '.name') - - echo "Checking asset: $FILE_NAME" - if [ ! -f "$FILE_NAME" ]; then - echo "Downloading $FILE_NAME..." - curl --location --silent --show-error --output "$FILE_NAME" "$DOWNLOAD_URL" || echo "Failed to download $FILE_NAME" - - fi - - echo "Checking $FILE_NAME against S3 version..." - TEMP_S3_DIR=$(mktemp --directory) - s3cmd get --no-progress "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME" || true - - if cmp --silent "$FILE_NAME" "$TEMP_S3_DIR/$FILE_NAME"; then - echo "$FILE_NAME is the same in S3, skipping..." - else - echo "Local $FILE_NAME is different. Uploading to S3..." - if s3cmd --acl-public put "$FILE_NAME" "s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME"; then - echo "Uploaded $FILE_NAME to s3://$BUCKET_NAME/$TAG_NAME/$FILE_NAME" - else - echo "Failed to upload $FILE_NAME. 
Logging to $FAILED_LOG" - echo "$TAG_NAME/$FILE_NAME" >> "$FAILED_LOG" - failed_uploads+=("$TAG_NAME/$FILE_NAME") - fi - fi - rm --recursive --force "$TEMP_S3_DIR" - rm --force "$FILE_NAME" - done - popd > /dev/null - else - echo "No assets found for $TAG_NAME." - fi - fi -done < <(find "$BASE_FOLDER" -mindepth 1 -type d) - -: 'Send summary alert only if there were failed uploads' -if [ ${#failed_uploads[@]} -ne 0 ]; then - send_slack_alert_with_failed -else - echo "No new mirroring failures" -fi From 829adb37b9d236aa0b4c20f7ef66170568f6d33d Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 12:35:08 +0100 Subject: [PATCH 29/56] ci fix working directory Signed-off-by: samuelarogbonlo --- scripts/mirror-builtin-actors.py | 62 ++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100755 scripts/mirror-builtin-actors.py diff --git a/scripts/mirror-builtin-actors.py b/scripts/mirror-builtin-actors.py new file mode 100755 index 000000000..26505e2cf --- /dev/null +++ b/scripts/mirror-builtin-actors.py @@ -0,0 +1,62 @@ +import os +import re +import requests +from datetime import datetime +from dateutil.relativedelta import relativedelta +import boto3 +from slack_sdk.web import WebClient +from github import Github + +# Define environment variables +SLACK_API_TOKEN = os.environ["SLACK_API_TOKEN"] +SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] +S3_BUCKET = os.environ["S3_BUCKET"] +REGION_NAME = os.environ["REGION_NAME"] + +GITHUB_REPO = "filecoin-project/builtin-actors" +release_pattern = r'^v\d+\.\d+\.\d+.*$' + +# Initialize clients +slack = WebClient(token=SLACK_API_TOKEN) +github = Github() +s3 = boto3.client("s3", + endpoint_url='https://fra1.digitaloceanspaces.com') + +# Calculate the cutoff date (3 years ago from current date) +three_years_ago = datetime.now() - relativedelta(years=3) + + +def send_slack_alert(message): + slack.chat_postMessage( + channel=SLACK_CHANNEL, + text=message + ).validate() + +# Process GitHub releases 
+try: + releases = github.get_repo(GITHUB_REPO).get_releases() + # Fetch already mirrored objects from S3 + s3_response = s3.list_objects(Bucket=S3_BUCKET) + already_mirrored = set() + if 'Contents' in s3_response: + already_mirrored = set(obj["Key"] for obj in s3_response["Contents"]) + + for release in releases: + tag_name = release.tag_name + published_at = release.published_at.replace(tzinfo=None) # Removing timezone info for comparison + + # Skip the release if it's older than 3 years + if published_at < three_years_ago: + continue + + if re.match(release_pattern, tag_name): + for asset in release.get_assets(): + key = f"{tag_name}/{asset.name}" + if key not in already_mirrored: + response = requests.get(asset.browser_download_url) + response.raise_for_status() + s3.put_object(Bucket=S3_BUCKET, Key=key, Body=response.content) + +except Exception as e: + send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") + raise From 7a8183a69a8a2f158fb326ec12900e5af8288f1c Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 12:37:22 +0100 Subject: [PATCH 30/56] ci fix working directory Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index f03a2662a..fb2c9e61a 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -37,6 +37,7 @@ jobs: key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - name: Install dependencies + working-directory: scripts/mirror-actors if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' run: poetry install --no-interaction --no-root From 329ee67f9908a16003c2aab63433dc09979f6ebe Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 12:39:36 +0100 Subject: [PATCH 31/56] test deploy Signed-off-by: samuelarogbonlo --- 
.github/workflows/mirror-builtin-actors.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index fb2c9e61a..c2f4c2ff6 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -43,7 +43,7 @@ jobs: - name: Run tests working-directory: scripts/mirror-actors - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + # if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: | source .venv/bin/activate python3 mirror_actors/ From 2991ace870599e2cbc4a9374f7551d8e26cc8a24 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 12:42:17 +0100 Subject: [PATCH 32/56] fix wrong env name Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/mirror_actors/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index c4cf26dde..4b7160cad 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -8,7 +8,7 @@ from github import Github # Define environment variables -SLACK_TOKEN = os.environ["SLACK_TOKEN"] +SLACK_API_TOKEN = os.environ["SLACK_API_TOKEN"] SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] BUCKET_NAME = os.environ["BUCKET_NAME"] @@ -16,7 +16,7 @@ release_pattern = r'^v\d+\.\d+\.\d+.*$' # Initialize clients -slack = WebClient(token=SLACK_TOKEN) +slack = WebClient(token=SLACK_API_TOKEN) github = Github() s3 = boto3.client("s3", endpoint_url='https://fra1.digitaloceanspaces.com') From 1d2b887319ec18ebd8214de0dca842f2e5145997 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 12:49:50 +0100 Subject: [PATCH 33/56] sily me forgetting to update new variables Signed-off-by: samuelarogbonlo 
--- scripts/mirror-actors/mirror_actors/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 4b7160cad..fd506612d 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -54,7 +54,7 @@ def send_slack_alert(message): if key not in already_mirrored: response = requests.get(asset.browser_download_url) response.raise_for_status() - s3.put_object(Bucket=S3_BUCKET, Key=key, Body=response.content) + s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=response.content) except Exception as e: send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") From 67ecd12d07268b0f9894db0223c95a8187cc68e2 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 13:42:32 +0100 Subject: [PATCH 34/56] remove test py and mirror to cloudflare Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 66 ++++++++++--------- .../mirror-actors/mirror_actors/__main__.py | 9 ++- scripts/mirror-builtin-actors.py | 62 ----------------- 3 files changed, 42 insertions(+), 95 deletions(-) delete mode 100755 scripts/mirror-builtin-actors.py diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index c2f4c2ff6..c357536f0 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -29,21 +29,13 @@ jobs: virtualenvs-in-project: true installer-parallel: true - - name: Load cached venv - id: cached-poetry-dependencies - uses: actions/cache@v3 - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Install dependencies working-directory: scripts/mirror-actors - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' run: poetry install --no-interaction --no-root - name: Run tests 
working-directory: scripts/mirror-actors - # if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: | source .venv/bin/activate python3 mirror_actors/ @@ -54,27 +46,41 @@ jobs: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} REGION_NAME: fra1 + ENDPOINT_URL: https://fra1.digitaloceanspaces.com - # mirror-releases-cf: - # runs-on: ubuntu-latest - # steps: - # - name: Checkout source code - # uses: actions/checkout@v4 + mirror-releases-cf: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 - # - name: Set up S3cmd cli tool for Cloudflare - # uses: s3-actions/s3cmd@v1.5.0 - # with: - # provider: cloudflare - # region: auto - # access_key: ${{ secrets.R2_ACCESS_KEY }} - # secret_key: ${{ secrets.R2_SECRET_KEY }} - # account_id: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + - name: Install Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' - # - name: Run script to mirror to Cloudflare - # working-directory: scripts - # if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - # run: bash ./mirror-builtin-actors.sh - # env: - # SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} - # SLACK_CHANNEL: "#forest-notifications" - # BUCKET_NAME: filecoin-builtin-actors + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Install dependencies + working-directory: scripts/mirror-actors + run: poetry install --no-interaction --no-root + + - name: Mirror Actors to CloudFlare + working-directory: scripts/mirror-actors + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + run: 
| + source .venv/bin/activate + python3 mirror_actors/ + env: + SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} + SLACK_CHANNEL: "#forest-notifications" + BUCKET_NAME: filecoin-builtin-actors + AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_KEY }} + REGION_NAME: "auto" + ENDPOINT_URL: "https://2238a825c5aca59233eab1f221f7aefb.r2.cloudflarestorage.com" diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index fd506612d..2d2b19d1b 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -10,7 +10,9 @@ # Define environment variables SLACK_API_TOKEN = os.environ["SLACK_API_TOKEN"] SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] -BUCKET_NAME = os.environ["BUCKET_NAME"] +S3_BUCKET = os.environ["S3_BUCKET"] +ENDPOINT_URL = os.environ["ENDPOINT_URL"] +REGION_NAME = os.environ["REGION_NAME"] GITHUB_REPO = "filecoin-project/builtin-actors" release_pattern = r'^v\d+\.\d+\.\d+.*$' @@ -18,8 +20,9 @@ # Initialize clients slack = WebClient(token=SLACK_API_TOKEN) github = Github() -s3 = boto3.client("s3", - endpoint_url='https://fra1.digitaloceanspaces.com') +s3 = boto3.client('s3', + region_name=REGION_NAME, + endpoint_url=ENDPOINT_URL) # Calculate the cutoff date (3 years ago from current date) three_years_ago = datetime.now() - relativedelta(years=3) diff --git a/scripts/mirror-builtin-actors.py b/scripts/mirror-builtin-actors.py deleted file mode 100755 index 26505e2cf..000000000 --- a/scripts/mirror-builtin-actors.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import re -import requests -from datetime import datetime -from dateutil.relativedelta import relativedelta -import boto3 -from slack_sdk.web import WebClient -from github import Github - -# Define environment variables -SLACK_API_TOKEN = os.environ["SLACK_API_TOKEN"] -SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] -S3_BUCKET = os.environ["S3_BUCKET"] -REGION_NAME = 
os.environ["REGION_NAME"] - -GITHUB_REPO = "filecoin-project/builtin-actors" -release_pattern = r'^v\d+\.\d+\.\d+.*$' - -# Initialize clients -slack = WebClient(token=SLACK_API_TOKEN) -github = Github() -s3 = boto3.client("s3", - endpoint_url='https://fra1.digitaloceanspaces.com') - -# Calculate the cutoff date (3 years ago from current date) -three_years_ago = datetime.now() - relativedelta(years=3) - - -def send_slack_alert(message): - slack.chat_postMessage( - channel=SLACK_CHANNEL, - text=message - ).validate() - -# Process GitHub releases -try: - releases = github.get_repo(GITHUB_REPO).get_releases() - # Fetch already mirrored objects from S3 - s3_response = s3.list_objects(Bucket=S3_BUCKET) - already_mirrored = set() - if 'Contents' in s3_response: - already_mirrored = set(obj["Key"] for obj in s3_response["Contents"]) - - for release in releases: - tag_name = release.tag_name - published_at = release.published_at.replace(tzinfo=None) # Removing timezone info for comparison - - # Skip the release if it's older than 3 years - if published_at < three_years_ago: - continue - - if re.match(release_pattern, tag_name): - for asset in release.get_assets(): - key = f"{tag_name}/{asset.name}" - if key not in already_mirrored: - response = requests.get(asset.browser_download_url) - response.raise_for_status() - s3.put_object(Bucket=S3_BUCKET, Key=key, Body=response.content) - -except Exception as e: - send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") - raise From 4aeffa56a8f8b3fda3639a54aa1d023613989d01 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 13:45:57 +0100 Subject: [PATCH 35/56] nits Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 +- scripts/mirror-actors/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index c357536f0..b64f42312 100644 --- 
a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -33,7 +33,7 @@ jobs: working-directory: scripts/mirror-actors run: poetry install --no-interaction --no-root - - name: Run tests + - name: Mirror Actors to DigitalOcean working-directory: scripts/mirror-actors if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: | diff --git a/scripts/mirror-actors/pyproject.toml b/scripts/mirror-actors/pyproject.toml index e0643c9c7..99e554194 100644 --- a/scripts/mirror-actors/pyproject.toml +++ b/scripts/mirror-actors/pyproject.toml @@ -2,7 +2,7 @@ name = "mirror-actors" version = "0.1.0" description = "" -authors = ["samuelarogbonlo "] +authors = ["samuelarogbonlo "] readme = "README.md" [tool.poetry.dependencies] From 28be1af6a10f62c231151f24f549424a9abd2b4b Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Fri, 19 Jan 2024 14:00:51 +0100 Subject: [PATCH 36/56] chore: add readme Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/README.md | 39 +++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 scripts/mirror-actors/README.md diff --git a/scripts/mirror-actors/README.md b/scripts/mirror-actors/README.md new file mode 100644 index 000000000..9b76e250a --- /dev/null +++ b/scripts/mirror-actors/README.md @@ -0,0 +1,39 @@ +# Overview +This project automates the process of mirroring Filecoin's built-in actors' releases from GitHub to cloud storage services (DigitalOcean Spaces and CloudFlare R2). The script checks for new releases on GitHub, downloads them, and uploads them to the specified cloud storage. It's designed to run periodically and ensures that the latest releases are always available in the cloud storage. + + +# Workflow + +The project uses GitHub Actions for automated deployment: + +- **Frequency**: The script runs every hour (0 * * * *). 
+- **Triggered By**: Changes in the scripts/mirror-actors/** path in the repository. This includes both pull requests and push events. +- **Manual Trigger**: The workflow can also be triggered manually via the GitHub UI (workflow_dispatch event). + +# Manual deployments + +For manual deployments, particularly useful for testing and debugging, set the following environment variables: + +## Required environment variables + +```bash +# DigitalOcean and CloudFlare Access Tokens depending which coud you want to mirror to +export AWS_ACCESS_KEY_ID= +export AWS_SECRET_ACCESS_KEY= + +# Slack Access Token and channel +export SLACK_API_TOKEN= +export SLACK_CHANNEL= + +# Boto client Configurations +export BUCKET_NAME= +export REGION_NAME= +export ENDPOINT_URL= +``` + +Playbook: + +```bash +$ poetry install --no-interaction --no-root # Install dependencies +$ poetry run python3 mirror_actors/ # Run the mirroring script +``` From 615571a50cbe263d859e23cf1d265944f30406f7 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Sun, 21 Jan 2024 20:56:53 +0100 Subject: [PATCH 37/56] nits: docs updates Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/mirror-actors/README.md b/scripts/mirror-actors/README.md index 9b76e250a..c0cce17cd 100644 --- a/scripts/mirror-actors/README.md +++ b/scripts/mirror-actors/README.md @@ -17,7 +17,7 @@ For manual deployments, particularly useful for testing and debugging, set the f ## Required environment variables ```bash -# DigitalOcean and CloudFlare Access Tokens depending which coud you want to mirror to +# DigitalOcean or CloudFlare Access Tokens depending which cloud you want to mirror to export AWS_ACCESS_KEY_ID= export AWS_SECRET_ACCESS_KEY= @@ -25,7 +25,7 @@ export AWS_SECRET_ACCESS_KEY= export SLACK_API_TOKEN= export SLACK_CHANNEL= -# Boto client Configurations +# s3 Boto client Configurations export BUCKET_NAME= export REGION_NAME= export 
ENDPOINT_URL= From 549ce30767b286271189601f0b165dc312a62dae Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Mon, 22 Jan 2024 10:45:10 +0100 Subject: [PATCH 38/56] nits: add docs to install Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/scripts/mirror-actors/README.md b/scripts/mirror-actors/README.md index c0cce17cd..c4d97f636 100644 --- a/scripts/mirror-actors/README.md +++ b/scripts/mirror-actors/README.md @@ -12,6 +12,13 @@ The project uses GitHub Actions for automated deployment: # Manual deployments +## Requirements + +### Software + +* [Python3](https://www.python.org/downloads/) +* [Poetry](https://python-poetry.org/docs/) + For manual deployments, particularly useful for testing and debugging, set the following environment variables: ## Required environment variables From 5703465ac49839e07ba3e574e572f4c8ee51e511 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Mon, 22 Jan 2024 14:34:09 +0100 Subject: [PATCH 39/56] feat: add linting ci Signed-off-by: samuelarogbonlo --- .github/workflows/scripts-lint.yml | 21 +++ .../mirror-actors/mirror_actors/__main__.py | 27 ++-- scripts/mirror-actors/poetry.lock | 127 +++++++++++++++++- scripts/mirror-actors/pyproject.toml | 5 + 4 files changed, 171 insertions(+), 9 deletions(-) diff --git a/.github/workflows/scripts-lint.yml b/.github/workflows/scripts-lint.yml index fec9d298f..ae949e0c0 100644 --- a/.github/workflows/scripts-lint.yml +++ b/.github/workflows/scripts-lint.yml @@ -38,3 +38,24 @@ jobs: yarn install yarn lint yarn js-check + run-py-linters: + runs-on: ubuntu-late + steps: + - uses: actions/checkout@v4 + - name: Install Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + + # - name: Install dependencies + # working-directory: scripts/mirror-actors + # run: poetry install --no-interaction --no-root + + - name: 
Lint Python Code + working-directory: scripts/mirror-actors + run: poetry run pylint mirror_actors/ -f actions diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 2d2b19d1b..4f6059601 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -1,7 +1,12 @@ +""" +This script mirrors Filecoin Actor releases from GitHub to an S3 bucket +and sends alerts to a Slack channel in case of failures. +""" + import os import re -import requests from datetime import datetime +import requests from dateutil.relativedelta import relativedelta import boto3 from slack_sdk.web import WebClient @@ -15,7 +20,7 @@ REGION_NAME = os.environ["REGION_NAME"] GITHUB_REPO = "filecoin-project/builtin-actors" -release_pattern = r'^v\d+\.\d+\.\d+.*$' +RELEASE_PATTERN = r'^v\d+\.\d+\.\d+.*$' # Initialize clients slack = WebClient(token=SLACK_API_TOKEN) @@ -29,6 +34,12 @@ def send_slack_alert(message): + """ + Send an alert message to a predefined Slack channel. + + Args: + message (str): The message to be sent to Slack. 
+ """ slack.chat_postMessage( channel=SLACK_CHANNEL, text=message @@ -38,26 +49,26 @@ def send_slack_alert(message): try: releases = github.get_repo(GITHUB_REPO).get_releases() # Fetch already mirrored objects from S3 - s3_response = s3.list_objects(Bucket=BUCKET_NAME) + s3_response = s3.list_objects(Bucket=S3_BUCKET) already_mirrored = set() if 'Contents' in s3_response: already_mirrored = set(obj["Key"] for obj in s3_response["Contents"]) for release in releases: tag_name = release.tag_name - published_at = release.published_at.replace(tzinfo=None) # Removing timezone info for comparison - + # Removing timezone info for comparison + published_at = release.published_at.replace(tzinfo=None) # Skip the release if it's older than 3 years if published_at < three_years_ago: continue - if re.match(release_pattern, tag_name): + if re.match(RELEASE_PATTERN, tag_name): for asset in release.get_assets(): key = f"{tag_name}/{asset.name}" if key not in already_mirrored: - response = requests.get(asset.browser_download_url) + response = requests.get(asset.browser_download_url, timeout=30) response.raise_for_status() - s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=response.content) + s3.put_object(Bucket=S3_BUCKET, Key=key, Body=response.content) except Exception as e: send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") diff --git a/scripts/mirror-actors/poetry.lock b/scripts/mirror-actors/poetry.lock index 5ba142a87..79d9be74b 100644 --- a/scripts/mirror-actors/poetry.lock +++ b/scripts/mirror-actors/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + [[package]] name = "boto3" version = "1.34.22" @@ -212,6 +223,17 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + [[package]] name = "cryptography" version = "41.0.7" @@ -274,6 +296,20 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "idna" version = "3.6" @@ -285,6 +321,20 @@ files = [ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "jmespath" version = "1.0.1" @@ -296,6 +346,32 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + [[package]] name = "pycparser" version = "2.21" @@ -347,6 +423,44 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pylint-actions" +version = "0.5.0" +description = "Pylint plugin for GitHub Actions" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pylint_actions-0.5.0-py3-none-any.whl", hash = "sha256:9a9922ae55c9afb56f602ef2434a6ea984c16504a159d6e4c439e71d325a0bfb"}, + {file = "pylint_actions-0.5.0.tar.gz", 
hash = "sha256:0ed9c6959226a335a6d0ae85943649f0ab88f6bfd3d3ee75b4fd85eac884f1d3"}, +] + +[package.dependencies] +pylint = ">=3.0.3,<4.0.0" + [[package]] name = "pynacl" version = "1.5.0" @@ -447,6 +561,17 @@ files = [ {file = "slack_sdk-3.26.2.tar.gz", hash = "sha256:bcdac5e688fa50e9357ecd00b803b6a8bad766aa614d35d8dc0636f40adc48bf"}, ] +[[package]] +name = "tomlkit" +version = "0.12.3" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, +] + [[package]] name = "typing-extensions" version = "4.9.0" @@ -557,4 +682,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "3580c003f3e353816bc928849f63736ceec61b7e958772640536de8f6d94d1f3" +content-hash = "34d3ab2708f7fcfff364f100750a8694cc3d659c0a7a73a75f4ef98334693e1d" diff --git a/scripts/mirror-actors/pyproject.toml b/scripts/mirror-actors/pyproject.toml index 99e554194..400703943 100644 --- a/scripts/mirror-actors/pyproject.toml +++ b/scripts/mirror-actors/pyproject.toml @@ -11,8 +11,13 @@ requests = "^2.31.0" slack-sdk = "^3.26.2" boto3 = "^1.34.22" pygithub = "^2.1.1" +pylint = "^3.0.3" +pylint-actions = "^0.5.0" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + +[tool.pylint.main] +load-plugins = "pylint_actions" From a7fb7b9f8f21e257040ff7bad1f514001ce74a04 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Mon, 22 Jan 2024 14:38:17 +0100 Subject: [PATCH 40/56] fix wrong OS distro Signed-off-by: samuelarogbonlo --- .github/workflows/scripts-lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scripts-lint.yml b/.github/workflows/scripts-lint.yml index ae949e0c0..03c829aeb 100644 --- 
a/.github/workflows/scripts-lint.yml +++ b/.github/workflows/scripts-lint.yml @@ -39,7 +39,7 @@ jobs: yarn lint yarn js-check run-py-linters: - runs-on: ubuntu-late + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install Python From 964a646c3bd2adb9c714d8af035a61e1f0b25757 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Mon, 22 Jan 2024 14:40:33 +0100 Subject: [PATCH 41/56] add install dep Signed-off-by: samuelarogbonlo --- .github/workflows/scripts-lint.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/scripts-lint.yml b/.github/workflows/scripts-lint.yml index 03c829aeb..06469ef6e 100644 --- a/.github/workflows/scripts-lint.yml +++ b/.github/workflows/scripts-lint.yml @@ -52,9 +52,9 @@ jobs: with: virtualenvs-create: true - # - name: Install dependencies - # working-directory: scripts/mirror-actors - # run: poetry install --no-interaction --no-root + - name: Install dependencies + working-directory: scripts/mirror-actors + run: poetry install --no-interaction --no-root - name: Lint Python Code working-directory: scripts/mirror-actors From 6a7a6d0cac51b32987a3e118fcdddb5713f2520e Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 12:17:16 +0100 Subject: [PATCH 42/56] add sink logic to local system and s3 Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 10 +- scripts/mirror-actors/README.md | 20 +- .../mirror-actors/mirror_actors/__main__.py | 145 +++++- scripts/mirror-actors/poetry.lock | 452 +++++++++++++++++- scripts/mirror-actors/pyproject.toml | 1 + 5 files changed, 597 insertions(+), 31 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index b64f42312..6e59686af 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -36,9 +36,7 @@ jobs: - name: Mirror Actors to DigitalOcean working-directory: scripts/mirror-actors if: 
github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: | - source .venv/bin/activate - python3 mirror_actors/ + run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} SLACK_CHANNEL: "#forest-notifications" @@ -47,6 +45,7 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} REGION_NAME: fra1 ENDPOINT_URL: https://fra1.digitaloceanspaces.com + SINK_TYPE: "S3" mirror-releases-cf: runs-on: ubuntu-latest @@ -73,9 +72,7 @@ jobs: - name: Mirror Actors to CloudFlare working-directory: scripts/mirror-actors if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') - run: | - source .venv/bin/activate - python3 mirror_actors/ + run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} SLACK_CHANNEL: "#forest-notifications" @@ -84,3 +81,4 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_KEY }} REGION_NAME: "auto" ENDPOINT_URL: "https://2238a825c5aca59233eab1f221f7aefb.r2.cloudflarestorage.com" + SINK_TYPE: "S3" diff --git a/scripts/mirror-actors/README.md b/scripts/mirror-actors/README.md index c4d97f636..23c6fef29 100644 --- a/scripts/mirror-actors/README.md +++ b/scripts/mirror-actors/README.md @@ -6,9 +6,9 @@ This project automates the process of mirroring Filecoin's built-in actors' rele The project uses GitHub Actions for automated deployment: -- **Frequency**: The script runs every hour (0 * * * *). -- **Triggered By**: Changes in the scripts/mirror-actors/** path in the repository. This includes both pull requests and push events. -- **Manual Trigger**: The workflow can also be triggered manually via the GitHub UI (workflow_dispatch event). +- **Frequency**: The script runs every hour `(0 * * * *)`. +- **Triggered By**: Changes in the `scripts/mirror-actors/**` path in the repository. This includes both pull requests and push events. 
+- **Manual Trigger**: The workflow can also be triggered manually via the GitHub UI `(workflow_dispatch event)`. # Manual deployments @@ -24,23 +24,27 @@ For manual deployments, particularly useful for testing and debugging, set the f ## Required environment variables ```bash -# DigitalOcean or CloudFlare Access Tokens depending which cloud you want to mirror to -export AWS_ACCESS_KEY_ID= -export AWS_SECRET_ACCESS_KEY= - # Slack Access Token and channel export SLACK_API_TOKEN= export SLACK_CHANNEL= # s3 Boto client Configurations + +## DigitalOcean or CloudFlare Access Tokens depending which cloud you want to mirror to. +export AWS_ACCESS_KEY_ID= +export AWS_SECRET_ACCESS_KEY= + export BUCKET_NAME= export REGION_NAME= export ENDPOINT_URL= + +# sink type meaning where you want to mirror actors (S3 or Local) +export SINK_TYPE= ``` Playbook: ```bash $ poetry install --no-interaction --no-root # Install dependencies -$ poetry run python3 mirror_actors/ # Run the mirroring script +$ poetry run python -m mirror_actors # Run the mirroring script ``` diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 4f6059601..1b902f7c4 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -1,10 +1,12 @@ """ -This script mirrors Filecoin Actor releases from GitHub to an S3 bucket -and sends alerts to a Slack channel in case of failures. +This script mirrors Filecoin Actor releases from GitHub to a specified storage sink. +It supports uploading to an S3 bucket or saving locally, based on configuration. +Alerts are sent to a Slack channel in case of failures. 
""" import os import re +import abc from datetime import datetime import requests from dateutil.relativedelta import relativedelta @@ -15,7 +17,7 @@ # Define environment variables SLACK_API_TOKEN = os.environ["SLACK_API_TOKEN"] SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] -S3_BUCKET = os.environ["S3_BUCKET"] +BUCKET_NAME = os.environ["BUCKET_NAME"] ENDPOINT_URL = os.environ["ENDPOINT_URL"] REGION_NAME = os.environ["REGION_NAME"] @@ -25,14 +27,11 @@ # Initialize clients slack = WebClient(token=SLACK_API_TOKEN) github = Github() -s3 = boto3.client('s3', - region_name=REGION_NAME, - endpoint_url=ENDPOINT_URL) +s3 = boto3.client('s3', region_name=REGION_NAME, endpoint_url=ENDPOINT_URL) -# Calculate the cutoff date (3 years ago from current date) +# Calculate the cutoff date (3 years ago from the current date) three_years_ago = datetime.now() - relativedelta(years=3) - def send_slack_alert(message): """ Send an alert message to a predefined Slack channel. @@ -45,30 +44,144 @@ def send_slack_alert(message): text=message ).validate() +# Abstract class for a data sink +class AbstractSink(abc.ABC): + """ + Abstract class for defining a data sink. + """ + @abc.abstractmethod + def save(self, data_key, content): + """ + Save content to the specified key. + + Args: + data_key (str): The key where the content will be saved. + content: The content to be saved. + """ + + @abc.abstractmethod + def exists(self, data_key): + """ + Check if a file exists in the sink. + + Args: + data_key (str): The key or path to check. + + Returns: + bool: True if the file exists, False otherwise. + """ + +# S3 data sink +class S3Sink(AbstractSink): + """ + S3 data sink for saving content to an S3 bucket. + """ + def __init__(self, bucket): + """ + Initialize the S3 client. + + Args: + bucket (str): The S3 bucket name. + """ + self.s3 = s3 + self.bucket = bucket + + def save(self, data_key, content): + """ + Save content to the specified key in the S3 bucket. 
+ + Args: + sink_key (str): The key where the content will be saved. + content (bytes): The content to be saved. + """ + self.s3.put_object(Bucket=self.bucket, Key=data_key, Body=content) + + def exists(self, data_key): + """ + Check if a file exists in the S3 bucket. + + Args: + data_key (str): The key of the file to check. + + Returns: + bool: True if the file exists, False otherwise. + """ + try: + self.s3.head_object(Bucket=self.bucket, Key=data_key) + return True + except self.s3.exceptions.NoSuchKey: + return False + +# Local data sink +class LocalSink(AbstractSink): + """ + Local data sink for saving files to the local file system. + + Attributes: + base_dir (str): Base directory where files will be mirrored. + """ + def __init__(self, base_dir): + self.base_dir = base_dir + + def save(self, data_key, content): + full_path = os.path.join(self.base_dir, data_key) + os.makedirs(os.path.dirname(full_path), exist_ok=True) + + with open(full_path, 'wb') as file: + file.write(content) + + def exists(self, data_key): + full_path = os.path.join(self.base_dir, data_key) + return os.path.exists(full_path) + +# Factory method to create the appropriate sink +def create_sink(sink_type, base_dir=None, **kwargs): + """ + Create and return the appropriate sink based on the specified type. + + Args: + sink_type (str): The type of sink to create ("S3" or "Local"). + base_dir (str, optional): Base directory for LocalSink, ignored for S3Sink. + **kwargs: Additional keyword arguments specific to the sink type. + + Returns: + AbstractSink: An instance of either S3Sink or LocalSink. + + Raises: + ValueError: If an invalid sink type is provided. + """ + if sink_type == "S3": + return S3Sink(**kwargs) + if sink_type == "Local": + return LocalSink(base_dir or "") + raise ValueError("Invalid sink type. 
Please Provide valid sink type, e.g S3 or Local") + + +SINK_TYPE = os.environ.get("SINK_TYPE", "Local") +LOCAL_SAVE_PATH = os.environ.get("LOCAL_SAVE_PATH", ".") +sink = create_sink(SINK_TYPE, bucket=BUCKET_NAME) + # Process GitHub releases try: releases = github.get_repo(GITHUB_REPO).get_releases() - # Fetch already mirrored objects from S3 - s3_response = s3.list_objects(Bucket=S3_BUCKET) + s3_response = s3.list_objects(Bucket=BUCKET_NAME) already_mirrored = set() if 'Contents' in s3_response: - already_mirrored = set(obj["Key"] for obj in s3_response["Contents"]) + already_mirrored = {obj["Key"] for obj in s3_response["Contents"]} for release in releases: tag_name = release.tag_name - # Removing timezone info for comparison published_at = release.published_at.replace(tzinfo=None) - # Skip the release if it's older than 3 years if published_at < three_years_ago: continue if re.match(RELEASE_PATTERN, tag_name): for asset in release.get_assets(): - key = f"{tag_name}/{asset.name}" - if key not in already_mirrored: + release_key = f"{tag_name}/{asset.name}" + if release_key not in already_mirrored: response = requests.get(asset.browser_download_url, timeout=30) response.raise_for_status() - s3.put_object(Bucket=S3_BUCKET, Key=key, Body=response.content) + sink.save(release_key, response.content) except Exception as e: send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") diff --git a/scripts/mirror-actors/poetry.lock b/scripts/mirror-actors/poetry.lock index 79d9be74b..21abd4891 100644 --- a/scripts/mirror-actors/poetry.lock +++ b/scripts/mirror-actors/poetry.lock @@ -30,6 +30,403 @@ s3transfer = ">=0.10.0,<0.11.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +[[package]] +name = "boto3-stubs" +version = "1.34.25" +description = "Type annotations for boto3 1.34.25 generated with mypy-boto3-builder 7.23.1" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-stubs-1.34.25.tar.gz", hash = 
"sha256:97cbae694b68e2da325bac066c4ebe8b0ba0b5fd47425e644ac899a1709d270f"}, + {file = "boto3_stubs-1.34.25-py3-none-any.whl", hash = "sha256:a3e2d122e5bcb642c1ca2f8d268723e2eec7a7df18bd8ab9b465f9bd2af76d28"}, +] + +[package.dependencies] +botocore-stubs = "*" +types-s3transfer = "*" + +[package.extras] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)"] +account = ["mypy-boto3-account (>=1.34.0,<1.35.0)"] +acm = ["mypy-boto3-acm (>=1.34.0,<1.35.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.34.0,<1.35.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)", "mypy-boto3-account (>=1.34.0,<1.35.0)", "mypy-boto3-acm (>=1.34.0,<1.35.0)", "mypy-boto3-acm-pca (>=1.34.0,<1.35.0)", "mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-amp (>=1.34.0,<1.35.0)", "mypy-boto3-amplify (>=1.34.0,<1.35.0)", "mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)", "mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)", "mypy-boto3-apigateway (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)", "mypy-boto3-appconfig (>=1.34.0,<1.35.0)", "mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)", "mypy-boto3-appfabric (>=1.34.0,<1.35.0)", "mypy-boto3-appflow (>=1.34.0,<1.35.0)", "mypy-boto3-appintegrations (>=1.34.0,<1.35.0)", "mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-application-insights (>=1.34.0,<1.35.0)", "mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-appmesh (>=1.34.0,<1.35.0)", "mypy-boto3-apprunner (>=1.34.0,<1.35.0)", "mypy-boto3-appstream (>=1.34.0,<1.35.0)", "mypy-boto3-appsync (>=1.34.0,<1.35.0)", "mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)", "mypy-boto3-athena (>=1.34.0,<1.35.0)", "mypy-boto3-auditmanager (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)", "mypy-boto3-b2bi (>=1.34.0,<1.35.0)", "mypy-boto3-backup 
(>=1.34.0,<1.35.0)", "mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)", "mypy-boto3-backupstorage (>=1.34.0,<1.35.0)", "mypy-boto3-batch (>=1.34.0,<1.35.0)", "mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-billingconductor (>=1.34.0,<1.35.0)", "mypy-boto3-braket (>=1.34.0,<1.35.0)", "mypy-boto3-budgets (>=1.34.0,<1.35.0)", "mypy-boto3-ce (>=1.34.0,<1.35.0)", "mypy-boto3-chime (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)", "mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)", "mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)", "mypy-boto3-cloud9 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)", "mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)", "mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)", "mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)", "mypy-boto3-codeartifact (>=1.34.0,<1.35.0)", "mypy-boto3-codebuild (>=1.34.0,<1.35.0)", "mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)", "mypy-boto3-codecommit (>=1.34.0,<1.35.0)", "mypy-boto3-codedeploy (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)", "mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-codepipeline (>=1.34.0,<1.35.0)", 
"mypy-boto3-codestar (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)", "mypy-boto3-comprehend (>=1.34.0,<1.35.0)", "mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)", "mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)", "mypy-boto3-config (>=1.34.0,<1.35.0)", "mypy-boto3-connect (>=1.34.0,<1.35.0)", "mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)", "mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)", "mypy-boto3-connectcases (>=1.34.0,<1.35.0)", "mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)", "mypy-boto3-controltower (>=1.34.0,<1.35.0)", "mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)", "mypy-boto3-cur (>=1.34.0,<1.35.0)", "mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)", "mypy-boto3-databrew (>=1.34.0,<1.35.0)", "mypy-boto3-dataexchange (>=1.34.0,<1.35.0)", "mypy-boto3-datapipeline (>=1.34.0,<1.35.0)", "mypy-boto3-datasync (>=1.34.0,<1.35.0)", "mypy-boto3-datazone (>=1.34.0,<1.35.0)", "mypy-boto3-dax (>=1.34.0,<1.35.0)", "mypy-boto3-detective (>=1.34.0,<1.35.0)", "mypy-boto3-devicefarm (>=1.34.0,<1.35.0)", "mypy-boto3-devops-guru (>=1.34.0,<1.35.0)", "mypy-boto3-directconnect (>=1.34.0,<1.35.0)", "mypy-boto3-discovery (>=1.34.0,<1.35.0)", "mypy-boto3-dlm (>=1.34.0,<1.35.0)", "mypy-boto3-dms (>=1.34.0,<1.35.0)", "mypy-boto3-docdb (>=1.34.0,<1.35.0)", "mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)", "mypy-boto3-drs (>=1.34.0,<1.35.0)", "mypy-boto3-ds (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)", "mypy-boto3-ebs (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)", "mypy-boto3-ecr (>=1.34.0,<1.35.0)", "mypy-boto3-ecr-public (>=1.34.0,<1.35.0)", "mypy-boto3-ecs (>=1.34.0,<1.35.0)", "mypy-boto3-efs (>=1.34.0,<1.35.0)", 
"mypy-boto3-eks (>=1.34.0,<1.35.0)", "mypy-boto3-eks-auth (>=1.34.0,<1.35.0)", "mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)", "mypy-boto3-elasticache (>=1.34.0,<1.35.0)", "mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)", "mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)", "mypy-boto3-elb (>=1.34.0,<1.35.0)", "mypy-boto3-elbv2 (>=1.34.0,<1.35.0)", "mypy-boto3-emr (>=1.34.0,<1.35.0)", "mypy-boto3-emr-containers (>=1.34.0,<1.35.0)", "mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-entityresolution (>=1.34.0,<1.35.0)", "mypy-boto3-es (>=1.34.0,<1.35.0)", "mypy-boto3-events (>=1.34.0,<1.35.0)", "mypy-boto3-evidently (>=1.34.0,<1.35.0)", "mypy-boto3-finspace (>=1.34.0,<1.35.0)", "mypy-boto3-finspace-data (>=1.34.0,<1.35.0)", "mypy-boto3-firehose (>=1.34.0,<1.35.0)", "mypy-boto3-fis (>=1.34.0,<1.35.0)", "mypy-boto3-fms (>=1.34.0,<1.35.0)", "mypy-boto3-forecast (>=1.34.0,<1.35.0)", "mypy-boto3-forecastquery (>=1.34.0,<1.35.0)", "mypy-boto3-frauddetector (>=1.34.0,<1.35.0)", "mypy-boto3-freetier (>=1.34.0,<1.35.0)", "mypy-boto3-fsx (>=1.34.0,<1.35.0)", "mypy-boto3-gamelift (>=1.34.0,<1.35.0)", "mypy-boto3-glacier (>=1.34.0,<1.35.0)", "mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)", "mypy-boto3-glue (>=1.34.0,<1.35.0)", "mypy-boto3-grafana (>=1.34.0,<1.35.0)", "mypy-boto3-greengrass (>=1.34.0,<1.35.0)", "mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)", "mypy-boto3-groundstation (>=1.34.0,<1.35.0)", "mypy-boto3-guardduty (>=1.34.0,<1.35.0)", "mypy-boto3-health (>=1.34.0,<1.35.0)", "mypy-boto3-healthlake (>=1.34.0,<1.35.0)", "mypy-boto3-honeycode (>=1.34.0,<1.35.0)", "mypy-boto3-iam (>=1.34.0,<1.35.0)", "mypy-boto3-identitystore (>=1.34.0,<1.35.0)", "mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)", "mypy-boto3-importexport (>=1.34.0,<1.35.0)", "mypy-boto3-inspector (>=1.34.0,<1.35.0)", "mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)", "mypy-boto3-inspector2 (>=1.34.0,<1.35.0)", "mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-iot (>=1.34.0,<1.35.0)", 
"mypy-boto3-iot-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-roborunner (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)", "mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)", "mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)", "mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)", "mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)", "mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)", "mypy-boto3-iotwireless (>=1.34.0,<1.35.0)", "mypy-boto3-ivs (>=1.34.0,<1.35.0)", "mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)", "mypy-boto3-ivschat (>=1.34.0,<1.35.0)", "mypy-boto3-kafka (>=1.34.0,<1.35.0)", "mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-kendra (>=1.34.0,<1.35.0)", "mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)", "mypy-boto3-keyspaces (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)", "mypy-boto3-kms (>=1.34.0,<1.35.0)", "mypy-boto3-lakeformation (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)", "mypy-boto3-lex-models (>=1.34.0,<1.35.0)", "mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)", 
"mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-lightsail (>=1.34.0,<1.35.0)", "mypy-boto3-location (>=1.34.0,<1.35.0)", "mypy-boto3-logs (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)", "mypy-boto3-m2 (>=1.34.0,<1.35.0)", "mypy-boto3-machinelearning (>=1.34.0,<1.35.0)", "mypy-boto3-macie2 (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)", "mypy-boto3-medialive (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)", "mypy-boto3-mediatailor (>=1.34.0,<1.35.0)", "mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)", "mypy-boto3-memorydb (>=1.34.0,<1.35.0)", "mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)", "mypy-boto3-mgh (>=1.34.0,<1.35.0)", "mypy-boto3-mgn (>=1.34.0,<1.35.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)", "mypy-boto3-mobile (>=1.34.0,<1.35.0)", "mypy-boto3-mq (>=1.34.0,<1.35.0)", "mypy-boto3-mturk (>=1.34.0,<1.35.0)", "mypy-boto3-mwaa (>=1.34.0,<1.35.0)", "mypy-boto3-neptune (>=1.34.0,<1.35.0)", "mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)", "mypy-boto3-neptunedata 
(>=1.34.0,<1.35.0)", "mypy-boto3-network-firewall (>=1.34.0,<1.35.0)", "mypy-boto3-networkmanager (>=1.34.0,<1.35.0)", "mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-nimble (>=1.34.0,<1.35.0)", "mypy-boto3-oam (>=1.34.0,<1.35.0)", "mypy-boto3-omics (>=1.34.0,<1.35.0)", "mypy-boto3-opensearch (>=1.34.0,<1.35.0)", "mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)", "mypy-boto3-opsworks (>=1.34.0,<1.35.0)", "mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)", "mypy-boto3-organizations (>=1.34.0,<1.35.0)", "mypy-boto3-osis (>=1.34.0,<1.35.0)", "mypy-boto3-outposts (>=1.34.0,<1.35.0)", "mypy-boto3-panorama (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)", "mypy-boto3-personalize (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-events (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-pi (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)", "mypy-boto3-pipes (>=1.34.0,<1.35.0)", "mypy-boto3-polly (>=1.34.0,<1.35.0)", "mypy-boto3-pricing (>=1.34.0,<1.35.0)", "mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)", "mypy-boto3-proton (>=1.34.0,<1.35.0)", "mypy-boto3-qbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-qconnect (>=1.34.0,<1.35.0)", "mypy-boto3-qldb (>=1.34.0,<1.35.0)", "mypy-boto3-qldb-session (>=1.34.0,<1.35.0)", "mypy-boto3-quicksight (>=1.34.0,<1.35.0)", "mypy-boto3-ram (>=1.34.0,<1.35.0)", "mypy-boto3-rbin (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-rds-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-rekognition (>=1.34.0,<1.35.0)", "mypy-boto3-repostspace (>=1.34.0,<1.35.0)", 
"mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)", "mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)", "mypy-boto3-resource-groups (>=1.34.0,<1.35.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)", "mypy-boto3-robomaker (>=1.34.0,<1.35.0)", "mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)", "mypy-boto3-route53 (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)", "mypy-boto3-route53domains (>=1.34.0,<1.35.0)", "mypy-boto3-route53resolver (>=1.34.0,<1.35.0)", "mypy-boto3-rum (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-s3control (>=1.34.0,<1.35.0)", "mypy-boto3-s3outposts (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-savingsplans (>=1.34.0,<1.35.0)", "mypy-boto3-scheduler (>=1.34.0,<1.35.0)", "mypy-boto3-schemas (>=1.34.0,<1.35.0)", "mypy-boto3-sdb (>=1.34.0,<1.35.0)", "mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)", "mypy-boto3-securityhub (>=1.34.0,<1.35.0)", "mypy-boto3-securitylake (>=1.34.0,<1.35.0)", "mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)", "mypy-boto3-service-quotas (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)", "mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)", "mypy-boto3-ses (>=1.34.0,<1.35.0)", "mypy-boto3-sesv2 (>=1.34.0,<1.35.0)", "mypy-boto3-shield (>=1.34.0,<1.35.0)", "mypy-boto3-signer (>=1.34.0,<1.35.0)", "mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)", "mypy-boto3-sms (>=1.34.0,<1.35.0)", "mypy-boto3-sms-voice (>=1.34.0,<1.35.0)", 
"mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)", "mypy-boto3-snowball (>=1.34.0,<1.35.0)", "mypy-boto3-sns (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)", "mypy-boto3-ssm (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)", "mypy-boto3-sso (>=1.34.0,<1.35.0)", "mypy-boto3-sso-admin (>=1.34.0,<1.35.0)", "mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)", "mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)", "mypy-boto3-storagegateway (>=1.34.0,<1.35.0)", "mypy-boto3-sts (>=1.34.0,<1.35.0)", "mypy-boto3-supplychain (>=1.34.0,<1.35.0)", "mypy-boto3-support (>=1.34.0,<1.35.0)", "mypy-boto3-support-app (>=1.34.0,<1.35.0)", "mypy-boto3-swf (>=1.34.0,<1.35.0)", "mypy-boto3-synthetics (>=1.34.0,<1.35.0)", "mypy-boto3-textract (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-query (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-write (>=1.34.0,<1.35.0)", "mypy-boto3-tnb (>=1.34.0,<1.35.0)", "mypy-boto3-transcribe (>=1.34.0,<1.35.0)", "mypy-boto3-transfer (>=1.34.0,<1.35.0)", "mypy-boto3-translate (>=1.34.0,<1.35.0)", "mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)", "mypy-boto3-voice-id (>=1.34.0,<1.35.0)", "mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)", "mypy-boto3-waf (>=1.34.0,<1.35.0)", "mypy-boto3-waf-regional (>=1.34.0,<1.35.0)", "mypy-boto3-wafv2 (>=1.34.0,<1.35.0)", "mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)", "mypy-boto3-wisdom (>=1.34.0,<1.35.0)", "mypy-boto3-workdocs (>=1.34.0,<1.35.0)", "mypy-boto3-worklink (>=1.34.0,<1.35.0)", "mypy-boto3-workmail (>=1.34.0,<1.35.0)", "mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)", "mypy-boto3-xray (>=1.34.0,<1.35.0)"] +amp = ["mypy-boto3-amp (>=1.34.0,<1.35.0)"] +amplify = ["mypy-boto3-amplify (>=1.34.0,<1.35.0)"] +amplifybackend = 
["mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.34.0,<1.35.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.34.0,<1.35.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.34.0,<1.35.0)"] +appflow = ["mypy-boto3-appflow (>=1.34.0,<1.35.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.34.0,<1.35.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.34.0,<1.35.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.34.0,<1.35.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.34.0,<1.35.0)"] +appstream = ["mypy-boto3-appstream (>=1.34.0,<1.35.0)"] +appsync = ["mypy-boto3-appsync (>=1.34.0,<1.35.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)"] +athena = ["mypy-boto3-athena (>=1.34.0,<1.35.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.34.0,<1.35.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.34.0,<1.35.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.34.0,<1.35.0)"] +backup = ["mypy-boto3-backup (>=1.34.0,<1.35.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.34.0,<1.35.0)"] +batch = ["mypy-boto3-batch (>=1.34.0,<1.35.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.34.0,<1.35.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"] +bedrock-runtime = 
["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"] +boto3 = ["boto3 (==1.34.25)", "botocore (==1.34.25)"] +braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"] +budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"] +ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"] +chime = ["mypy-boto3-chime (>=1.34.0,<1.35.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.34.0,<1.35.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.34.0,<1.35.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.34.0,<1.35.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.34.0,<1.35.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.34.0,<1.35.0)"] +codedeploy = ["mypy-boto3-codedeploy 
(>=1.34.0,<1.35.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.34.0,<1.35.0)"] +codestar = ["mypy-boto3-codestar (>=1.34.0,<1.35.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.34.0,<1.35.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)"] +config = ["mypy-boto3-config (>=1.34.0,<1.35.0)"] +connect = ["mypy-boto3-connect (>=1.34.0,<1.35.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.34.0,<1.35.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)"] +controltower = ["mypy-boto3-controltower (>=1.34.0,<1.35.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)"] +cur = ["mypy-boto3-cur (>=1.34.0,<1.35.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)"] +databrew = ["mypy-boto3-databrew (>=1.34.0,<1.35.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.34.0,<1.35.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.34.0,<1.35.0)"] +datasync = ["mypy-boto3-datasync (>=1.34.0,<1.35.0)"] +datazone = ["mypy-boto3-datazone (>=1.34.0,<1.35.0)"] +dax = ["mypy-boto3-dax (>=1.34.0,<1.35.0)"] +detective = ["mypy-boto3-detective (>=1.34.0,<1.35.0)"] +devicefarm = 
["mypy-boto3-devicefarm (>=1.34.0,<1.35.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.34.0,<1.35.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.34.0,<1.35.0)"] +discovery = ["mypy-boto3-discovery (>=1.34.0,<1.35.0)"] +dlm = ["mypy-boto3-dlm (>=1.34.0,<1.35.0)"] +dms = ["mypy-boto3-dms (>=1.34.0,<1.35.0)"] +docdb = ["mypy-boto3-docdb (>=1.34.0,<1.35.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)"] +drs = ["mypy-boto3-drs (>=1.34.0,<1.35.0)"] +ds = ["mypy-boto3-ds (>=1.34.0,<1.35.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.34.0,<1.35.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)"] +ebs = ["mypy-boto3-ebs (>=1.34.0,<1.35.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.34.0,<1.35.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)"] +ecr = ["mypy-boto3-ecr (>=1.34.0,<1.35.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.34.0,<1.35.0)"] +ecs = ["mypy-boto3-ecs (>=1.34.0,<1.35.0)"] +efs = ["mypy-boto3-efs (>=1.34.0,<1.35.0)"] +eks = ["mypy-boto3-eks (>=1.34.0,<1.35.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.34.0,<1.35.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.34.0,<1.35.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)"] +elb = ["mypy-boto3-elb (>=1.34.0,<1.35.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.34.0,<1.35.0)"] +emr = ["mypy-boto3-emr (>=1.34.0,<1.35.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.34.0,<1.35.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.34.0,<1.35.0)"] +es = ["mypy-boto3-es (>=1.34.0,<1.35.0)"] +essential = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", 
"mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +events = ["mypy-boto3-events (>=1.34.0,<1.35.0)"] +evidently = ["mypy-boto3-evidently (>=1.34.0,<1.35.0)"] +finspace = ["mypy-boto3-finspace (>=1.34.0,<1.35.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.34.0,<1.35.0)"] +firehose = ["mypy-boto3-firehose (>=1.34.0,<1.35.0)"] +fis = ["mypy-boto3-fis (>=1.34.0,<1.35.0)"] +fms = ["mypy-boto3-fms (>=1.34.0,<1.35.0)"] +forecast = ["mypy-boto3-forecast (>=1.34.0,<1.35.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.34.0,<1.35.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.34.0,<1.35.0)"] +freetier = ["mypy-boto3-freetier (>=1.34.0,<1.35.0)"] +fsx = ["mypy-boto3-fsx (>=1.34.0,<1.35.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.34.0,<1.35.0)"] +glacier = ["mypy-boto3-glacier (>=1.34.0,<1.35.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)"] +glue = ["mypy-boto3-glue (>=1.34.0,<1.35.0)"] +grafana = ["mypy-boto3-grafana (>=1.34.0,<1.35.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.34.0,<1.35.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.34.0,<1.35.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.34.0,<1.35.0)"] +health = ["mypy-boto3-health (>=1.34.0,<1.35.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.34.0,<1.35.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.34.0,<1.35.0)"] +iam = ["mypy-boto3-iam (>=1.34.0,<1.35.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.34.0,<1.35.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)"] +importexport = ["mypy-boto3-importexport (>=1.34.0,<1.35.0)"] +inspector = ["mypy-boto3-inspector (>=1.34.0,<1.35.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.34.0,<1.35.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)"] +iot = ["mypy-boto3-iot (>=1.34.0,<1.35.0)"] +iot-data = ["mypy-boto3-iot-data 
(>=1.34.0,<1.35.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.34.0,<1.35.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.34.0,<1.35.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.34.0,<1.35.0)"] +ivs = ["mypy-boto3-ivs (>=1.34.0,<1.35.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.34.0,<1.35.0)"] +kafka = ["mypy-boto3-kafka (>=1.34.0,<1.35.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)"] +kendra = ["mypy-boto3-kendra (>=1.34.0,<1.35.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.34.0,<1.35.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.34.0,<1.35.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 
(>=1.34.0,<1.35.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)"] +kms = ["mypy-boto3-kms (>=1.34.0,<1.35.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.34.0,<1.35.0)"] +lambda = ["mypy-boto3-lambda (>=1.34.0,<1.35.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.34.0,<1.35.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.34.0,<1.35.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.34.0,<1.35.0)"] +location = ["mypy-boto3-location (>=1.34.0,<1.35.0)"] +logs = ["mypy-boto3-logs (>=1.34.0,<1.35.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)"] +m2 = ["mypy-boto3-m2 (>=1.34.0,<1.35.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.34.0,<1.35.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.34.0,<1.35.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)"] +mediaconnect = ["mypy-boto3-mediaconnect 
(>=1.34.0,<1.35.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)"] +medialive = ["mypy-boto3-medialive (>=1.34.0,<1.35.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.34.0,<1.35.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.34.0,<1.35.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.34.0,<1.35.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.34.0,<1.35.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)"] +mgh = ["mypy-boto3-mgh (>=1.34.0,<1.35.0)"] +mgn = ["mypy-boto3-mgn (>=1.34.0,<1.35.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)"] +mobile = ["mypy-boto3-mobile (>=1.34.0,<1.35.0)"] +mq = ["mypy-boto3-mq (>=1.34.0,<1.35.0)"] +mturk = ["mypy-boto3-mturk (>=1.34.0,<1.35.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.34.0,<1.35.0)"] +neptune = ["mypy-boto3-neptune (>=1.34.0,<1.35.0)"] +neptune-graph = ["mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.34.0,<1.35.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.34.0,<1.35.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.34.0,<1.35.0)"] +networkmonitor = ["mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)"] +nimble = ["mypy-boto3-nimble (>=1.34.0,<1.35.0)"] +oam = ["mypy-boto3-oam (>=1.34.0,<1.35.0)"] +omics = ["mypy-boto3-omics (>=1.34.0,<1.35.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.34.0,<1.35.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless 
(>=1.34.0,<1.35.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.34.0,<1.35.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)"] +organizations = ["mypy-boto3-organizations (>=1.34.0,<1.35.0)"] +osis = ["mypy-boto3-osis (>=1.34.0,<1.35.0)"] +outposts = ["mypy-boto3-outposts (>=1.34.0,<1.35.0)"] +panorama = ["mypy-boto3-panorama (>=1.34.0,<1.35.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)"] +personalize = ["mypy-boto3-personalize (>=1.34.0,<1.35.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.34.0,<1.35.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)"] +pi = ["mypy-boto3-pi (>=1.34.0,<1.35.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.34.0,<1.35.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)"] +pipes = ["mypy-boto3-pipes (>=1.34.0,<1.35.0)"] +polly = ["mypy-boto3-polly (>=1.34.0,<1.35.0)"] +pricing = ["mypy-boto3-pricing (>=1.34.0,<1.35.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)"] +proton = ["mypy-boto3-proton (>=1.34.0,<1.35.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.34.0,<1.35.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.34.0,<1.35.0)"] +qldb = ["mypy-boto3-qldb (>=1.34.0,<1.35.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.34.0,<1.35.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.34.0,<1.35.0)"] +ram = ["mypy-boto3-ram (>=1.34.0,<1.35.0)"] +rbin = ["mypy-boto3-rbin (>=1.34.0,<1.35.0)"] +rds = ["mypy-boto3-rds (>=1.34.0,<1.35.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.34.0,<1.35.0)"] +redshift = ["mypy-boto3-redshift (>=1.34.0,<1.35.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.34.0,<1.35.0)"] 
+redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.34.0,<1.35.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.34.0,<1.35.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.34.0,<1.35.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.34.0,<1.35.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)"] +route53 = ["mypy-boto3-route53 (>=1.34.0,<1.35.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.34.0,<1.35.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.34.0,<1.35.0)"] +rum = ["mypy-boto3-rum (>=1.34.0,<1.35.0)"] +s3 = ["mypy-boto3-s3 (>=1.34.0,<1.35.0)"] +s3control = ["mypy-boto3-s3control (>=1.34.0,<1.35.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.34.0,<1.35.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.34.0,<1.35.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.34.0,<1.35.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.34.0,<1.35.0)"] +schemas = ["mypy-boto3-schemas (>=1.34.0,<1.35.0)"] +sdb = 
["mypy-boto3-sdb (>=1.34.0,<1.35.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.34.0,<1.35.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.34.0,<1.35.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.34.0,<1.35.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)"] +ses = ["mypy-boto3-ses (>=1.34.0,<1.35.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.34.0,<1.35.0)"] +shield = ["mypy-boto3-shield (>=1.34.0,<1.35.0)"] +signer = ["mypy-boto3-signer (>=1.34.0,<1.35.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)"] +sms = ["mypy-boto3-sms (>=1.34.0,<1.35.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.34.0,<1.35.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)"] +snowball = ["mypy-boto3-snowball (>=1.34.0,<1.35.0)"] +sns = ["mypy-boto3-sns (>=1.34.0,<1.35.0)"] +sqs = ["mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +ssm = ["mypy-boto3-ssm (>=1.34.0,<1.35.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)"] +sso = ["mypy-boto3-sso (>=1.34.0,<1.35.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.34.0,<1.35.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.34.0,<1.35.0)"] +sts = ["mypy-boto3-sts (>=1.34.0,<1.35.0)"] +supplychain = ["mypy-boto3-supplychain (>=1.34.0,<1.35.0)"] +support = ["mypy-boto3-support (>=1.34.0,<1.35.0)"] +support-app = ["mypy-boto3-support-app (>=1.34.0,<1.35.0)"] +swf = ["mypy-boto3-swf (>=1.34.0,<1.35.0)"] +synthetics = 
["mypy-boto3-synthetics (>=1.34.0,<1.35.0)"] +textract = ["mypy-boto3-textract (>=1.34.0,<1.35.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.34.0,<1.35.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.34.0,<1.35.0)"] +tnb = ["mypy-boto3-tnb (>=1.34.0,<1.35.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.34.0,<1.35.0)"] +transfer = ["mypy-boto3-transfer (>=1.34.0,<1.35.0)"] +translate = ["mypy-boto3-translate (>=1.34.0,<1.35.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.34.0,<1.35.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)"] +waf = ["mypy-boto3-waf (>=1.34.0,<1.35.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.34.0,<1.35.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.34.0,<1.35.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.34.0,<1.35.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.34.0,<1.35.0)"] +worklink = ["mypy-boto3-worklink (>=1.34.0,<1.35.0)"] +workmail = ["mypy-boto3-workmail (>=1.34.0,<1.35.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.34.0,<1.35.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)"] +xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"] + [[package]] name = "botocore" version = "1.34.22" @@ -49,6 +446,23 @@ urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} [package.extras] crt = ["awscrt (==0.19.19)"] +[[package]] +name = "botocore-stubs" +version = "1.34.25" +description = "Type annotations and code completion for botocore" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "botocore_stubs-1.34.25-py3-none-any.whl", hash = 
"sha256:332d1d36a809d0a59fb29ebbcca87265e15ec3ff75889e8dba74a55f4a8a24d3"}, + {file = "botocore_stubs-1.34.25.tar.gz", hash = "sha256:7322192021e0492204d223aac880d4d2bdfd390f3e7f3874d1e9e198f8248367"}, +] + +[package.dependencies] +types-awscrt = "*" + +[package.extras] +botocore = ["botocore"] + [[package]] name = "certifi" version = "2023.11.17" @@ -572,6 +986,42 @@ files = [ {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, ] +[[package]] +name = "types-awscrt" +version = "0.20.2" +description = "Type annotations and code completion for awscrt" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "types_awscrt-0.20.2-py3-none-any.whl", hash = "sha256:7b5ccaaecc030ffe9e28f59e876a3013f401181ff04e0458e961d0bb9c7cdfbd"}, + {file = "types_awscrt-0.20.2.tar.gz", hash = "sha256:5e29986a4ad3968ac3d82caa85d668d3c66224c9ee78be3fe64bce30dae7b798"}, +] + +[[package]] +name = "types-boto3" +version = "1.0.2" +description = "Proxy package for boto3-stubs" +optional = false +python-versions = "*" +files = [ + {file = "types-boto3-1.0.2.tar.gz", hash = "sha256:15f3ffad0314e40a0708fec25f94891414f93260202422bf8b19b6913853c983"}, + {file = "types_boto3-1.0.2-py3-none-any.whl", hash = "sha256:a6a88e94d59d887839863a64095493956efc148e747206880a7eb47d90ae8398"}, +] + +[package.dependencies] +boto3-stubs = "*" + +[[package]] +name = "types-s3transfer" +version = "0.10.0" +description = "Type annotations and code completion for s3transfer" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "types_s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:44fcdf0097b924a9aab1ee4baa1179081a9559ca62a88c807e2b256893ce688f"}, + {file = "types_s3transfer-0.10.0.tar.gz", hash = "sha256:35e4998c25df7f8985ad69dedc8e4860e8af3b43b7615e940d53c00d413bdc69"}, +] + [[package]] name = "typing-extensions" version = "4.9.0" @@ -682,4 +1132,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = 
"^3.12" -content-hash = "34d3ab2708f7fcfff364f100750a8694cc3d659c0a7a73a75f4ef98334693e1d" +content-hash = "5a4994775b4ddde0c4e3773696ceef39e8fce683a2eb82d0abb27df539526929" diff --git a/scripts/mirror-actors/pyproject.toml b/scripts/mirror-actors/pyproject.toml index 400703943..c96a3fe45 100644 --- a/scripts/mirror-actors/pyproject.toml +++ b/scripts/mirror-actors/pyproject.toml @@ -13,6 +13,7 @@ boto3 = "^1.34.22" pygithub = "^2.1.1" pylint = "^3.0.3" pylint-actions = "^0.5.0" +types-boto3 = "^1.0.2" [build-system] From b0875e4bc8c7a30370c30c6401f9cadd3250825b Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 12:19:34 +0100 Subject: [PATCH 43/56] test fail ci Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/mirror_actors/__main__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 1b902f7c4..00337e3b9 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -73,9 +73,6 @@ def exists(self, data_key): # S3 data sink class S3Sink(AbstractSink): - """ - S3 data sink for saving content to an S3 bucket. - """ def __init__(self, bucket): """ Initialize the S3 client. From d0e286e76b9c69d21b1610073d2528102990c78d Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 12:20:48 +0100 Subject: [PATCH 44/56] fix ci Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/mirror_actors/__main__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 00337e3b9..1b902f7c4 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -73,6 +73,9 @@ def exists(self, data_key): # S3 data sink class S3Sink(AbstractSink): + """ + S3 data sink for saving content to an S3 bucket. 
+ """ def __init__(self, bucket): """ Initialize the S3 client. From 013f439fb641d996cc4fb7e62cb75b08e70aa7b7 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 12:25:54 +0100 Subject: [PATCH 45/56] nits: add caching Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 6e59686af..b22b4a8f2 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -29,8 +29,16 @@ jobs: virtualenvs-in-project: true installer-parallel: true + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: scripts/mirror-actors/.venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + - name: Install dependencies working-directory: scripts/mirror-actors + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' run: poetry install --no-interaction --no-root - name: Mirror Actors to DigitalOcean From d84f069863644266857ab2996f88daa4760abba5 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 13:39:01 +0100 Subject: [PATCH 46/56] nits: local cache poetr install Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index b22b4a8f2..04ef24b3a 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -22,14 +22,22 @@ jobs: with: python-version: '3.12' + - name: Load cached Poetry installation + id: cached-poetry + uses: actions/cache@v3 + with: + path: ~/.local + key: poetry-0 + - name: Install Poetry + if: steps.cached-poetry.outputs.cache-hit != 'true' uses: snok/install-poetry@v1 
with: virtualenvs-create: true virtualenvs-in-project: true installer-parallel: true - - name: Load cached venv + - name: Load cached dependencies Install id: cached-poetry-dependencies uses: actions/cache@v3 with: From bccd80f91dee1cc51813696e506263c67bf5f26a Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 14:05:09 +0100 Subject: [PATCH 47/56] cache for cloudflare job Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 04ef24b3a..146ef9fe1 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -74,6 +74,13 @@ jobs: with: python-version: '3.12' + - name: Load cached Poetry installation + id: cached-poetry + uses: actions/cache@v3 + with: + path: ~/.local + key: poetry-1 + - name: Install Poetry uses: snok/install-poetry@v1 with: @@ -81,8 +88,16 @@ jobs: virtualenvs-in-project: true installer-parallel: true + - name: Load cached dependencies Install + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: scripts/mirror-actors/.venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + - name: Install dependencies working-directory: scripts/mirror-actors + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' run: poetry install --no-interaction --no-root - name: Mirror Actors to CloudFlare From 18e961350aea4069c044101d89eb6be213c332ca Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 14:07:32 +0100 Subject: [PATCH 48/56] add missed if job for cloudflare Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 146ef9fe1..32e6796b1 
100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -82,6 +82,7 @@ jobs: key: poetry-1 - name: Install Poetry + if: steps.cached-poetry.outputs.cache-hit != 'true' uses: snok/install-poetry@v1 with: virtualenvs-create: true From 1333ea66a68666f9c9c1829ed2000d6e8e76f788 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 15:05:08 +0100 Subject: [PATCH 49/56] chore: don't use s3 client when mirroring to local Signed-off-by: samuelarogbonlo --- scripts/mirror-actors/README.md | 19 ++- .../mirror-actors/mirror_actors/__main__.py | 114 +++++++++++++----- 2 files changed, 95 insertions(+), 38 deletions(-) diff --git a/scripts/mirror-actors/README.md b/scripts/mirror-actors/README.md index 23c6fef29..7ff054ed3 100644 --- a/scripts/mirror-actors/README.md +++ b/scripts/mirror-actors/README.md @@ -24,13 +24,21 @@ For manual deployments, particularly useful for testing and debugging, set the f ## Required environment variables ```bash -# Slack Access Token and channel + +# Sink type: Indicates the target for mirroring actors (S3 or Local) +export SINK_TYPE= + +# For local mirroring (SINK_TYPE=Local), specify the save path +export LOCAL_SAVE_PATH= + + +# For s3 mirroring (SINK_TYPE=S3) +## Slack Access Token and channel export SLACK_API_TOKEN= export SLACK_CHANNEL= -# s3 Boto client Configurations - -## DigitalOcean or CloudFlare Access Tokens depending which cloud you want to mirror to. 
+## s3 Boto3 client configurations +### Access Tokens for DigitalOcean or CloudFlare, based on your cloud preference export AWS_ACCESS_KEY_ID= export AWS_SECRET_ACCESS_KEY= @@ -38,10 +46,9 @@ export BUCKET_NAME= export REGION_NAME= export ENDPOINT_URL= -# sink type meaning where you want to mirror actors (S3 or Local) -export SINK_TYPE= ``` + Playbook: ```bash diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 1b902f7c4..9aec8392b 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -14,20 +14,11 @@ from slack_sdk.web import WebClient from github import Github -# Define environment variables -SLACK_API_TOKEN = os.environ["SLACK_API_TOKEN"] -SLACK_CHANNEL = os.environ["SLACK_CHANNEL"] -BUCKET_NAME = os.environ["BUCKET_NAME"] -ENDPOINT_URL = os.environ["ENDPOINT_URL"] -REGION_NAME = os.environ["REGION_NAME"] - GITHUB_REPO = "filecoin-project/builtin-actors" RELEASE_PATTERN = r'^v\d+\.\d+\.\d+.*$' -# Initialize clients -slack = WebClient(token=SLACK_API_TOKEN) +# Initialize GitHub client github = Github() -s3 = boto3.client('s3', region_name=REGION_NAME, endpoint_url=ENDPOINT_URL) # Calculate the cutoff date (3 years ago from the current date) three_years_ago = datetime.now() - relativedelta(years=3) @@ -39,8 +30,15 @@ def send_slack_alert(message): Args: message (str): The message to be sent to Slack. """ + # Define environment variables + slack_api_token = os.environ["SLACK_API_TOKEN"] + slack_channel = os.environ["SLACK_CHANNEL"] + + # Initialize Slack client + slack = WebClient(token=slack_api_token) + slack.chat_postMessage( - channel=SLACK_CHANNEL, + channel=slack_channel, text=message ).validate() @@ -71,30 +69,39 @@ def exists(self, data_key): bool: True if the file exists, False otherwise. """ + @abc.abstractmethod + def list_files(self): + """ + List all files in the sink. + + Returns: + set: A set of file paths. 
+ """ + # S3 data sink class S3Sink(AbstractSink): """ S3 data sink for saving content to an S3 bucket. """ - def __init__(self, bucket): + + def __init__(self): """ Initialize the S3 client. - - Args: - bucket (str): The S3 bucket name. """ - self.s3 = s3 - self.bucket = bucket + self.bucket_name = os.environ["BUCKET_NAME"] + endpoint_url = os.environ["ENDPOINT_URL"] + region_name = os.environ["REGION_NAME"] + self.s3 = boto3.client('s3', region_name=endpoint_url, endpoint_url=region_name) def save(self, data_key, content): """ Save content to the specified key in the S3 bucket. Args: - sink_key (str): The key where the content will be saved. + data_key (str): The key where the content will be saved. content (bytes): The content to be saved. """ - self.s3.put_object(Bucket=self.bucket, Key=data_key, Body=content) + self.s3.put_object(Bucket=self.bucket_name, Key=data_key, Body=content) def exists(self, data_key): """ @@ -107,33 +114,75 @@ def exists(self, data_key): bool: True if the file exists, False otherwise. """ try: - self.s3.head_object(Bucket=self.bucket, Key=data_key) + self.s3.head_object(Bucket=self.bucket_name, Key=data_key) return True except self.s3.exceptions.NoSuchKey: return False + def list_files(self): + """ + List all files in the S3 bucket. + + Returns: + set: A set of file paths in the bucket. + """ + s3_response = self.s3.list_objects(Bucket=self.bucket_name) + return {obj["Key"] for obj in s3_response.get('Contents', [])} + + # Local data sink class LocalSink(AbstractSink): """ Local data sink for saving files to the local file system. - - Attributes: - base_dir (str): Base directory where files will be mirrored. """ def __init__(self, base_dir): + """ + Initialize the local sink. + + Args: + base_dir (str): Base directory where files will be mirrored. + """ self.base_dir = base_dir def save(self, data_key, content): + """ + Save content to the specified key in the local filesystem. 
+ + Args: + data_key (str): The key where the content will be saved. + content (bytes): The content to be saved. + """ full_path = os.path.join(self.base_dir, data_key) os.makedirs(os.path.dirname(full_path), exist_ok=True) - with open(full_path, 'wb') as file: file.write(content) def exists(self, data_key): + """ + Check if a file exists in the local filesystem. + + Args: + data_key (str): The key of the file to check. + + Returns: + bool: True if the file exists, False otherwise. + """ full_path = os.path.join(self.base_dir, data_key) return os.path.exists(full_path) + def list_files(self): + """ + List all files in the local filesystem. + + Returns: + set: A set of file paths in the base directory. + """ + mirrored_files = set() + for root, _, files in os.walk(self.base_dir): + for file in files: + mirrored_files.add(os.path.join(root, file)) + return mirrored_files + # Factory method to create the appropriate sink def create_sink(sink_type, base_dir=None, **kwargs): """ @@ -154,20 +203,17 @@ def create_sink(sink_type, base_dir=None, **kwargs): return S3Sink(**kwargs) if sink_type == "Local": return LocalSink(base_dir or "") - raise ValueError("Invalid sink type. Please Provide valid sink type, e.g S3 or Local") - + raise ValueError("Invalid sink type. 
Please provide a valid sink type, e.g,'S3' or 'Local'") +# Determine sink type and initialize SINK_TYPE = os.environ.get("SINK_TYPE", "Local") LOCAL_SAVE_PATH = os.environ.get("LOCAL_SAVE_PATH", ".") -sink = create_sink(SINK_TYPE, bucket=BUCKET_NAME) +sink = create_sink(SINK_TYPE, base_dir=LOCAL_SAVE_PATH) # Process GitHub releases try: releases = github.get_repo(GITHUB_REPO).get_releases() - s3_response = s3.list_objects(Bucket=BUCKET_NAME) - already_mirrored = set() - if 'Contents' in s3_response: - already_mirrored = {obj["Key"] for obj in s3_response["Contents"]} + already_mirrored = sink.list_files() for release in releases: tag_name = release.tag_name @@ -184,5 +230,9 @@ def create_sink(sink_type, base_dir=None, **kwargs): sink.save(release_key, response.content) except Exception as e: - send_slack_alert(f"ā›” Filecoin Actor mirroring failed: {e}") + error_message = f"ā›” Filecoin Actor mirroring failed: {e}" + if SINK_TYPE == "S3": + send_slack_alert(error_message) + else: + print(error_message) raise From ccb2989e23f1654291b81118d222069f55ef05d9 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 18:30:49 +0100 Subject: [PATCH 50/56] chore: reduce my over kill Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 - scripts/mirror-actors/README.md | 16 +- .../mirror-actors/mirror_actors/__main__.py | 208 +++--------------- 3 files changed, 35 insertions(+), 191 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 32e6796b1..09191bd6a 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -61,7 +61,6 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} REGION_NAME: fra1 ENDPOINT_URL: https://fra1.digitaloceanspaces.com - SINK_TYPE: "S3" mirror-releases-cf: runs-on: ubuntu-latest @@ -113,4 +112,3 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_KEY }} REGION_NAME: "auto" 
ENDPOINT_URL: "https://2238a825c5aca59233eab1f221f7aefb.r2.cloudflarestorage.com" - SINK_TYPE: "S3" diff --git a/scripts/mirror-actors/README.md b/scripts/mirror-actors/README.md index 7ff054ed3..d5e93a7d5 100644 --- a/scripts/mirror-actors/README.md +++ b/scripts/mirror-actors/README.md @@ -25,20 +25,19 @@ For manual deployments, particularly useful for testing and debugging, set the f ```bash -# Sink type: Indicates the target for mirroring actors (S3 or Local) -export SINK_TYPE= +# Defines if mirroring actors are on local storage or s3. Set to 'True' for local, 'False' for s3. +export USE_LOCAL= -# For local mirroring (SINK_TYPE=Local), specify the save path +# Path to save mirrored data if local mirroring is enabled (USE_LOCAL=true) export LOCAL_SAVE_PATH= - -# For s3 mirroring (SINK_TYPE=S3) -## Slack Access Token and channel +# Configuration for s3 mirroring (USE_LOCAL=false) +## Slack credentials and target channel export SLACK_API_TOKEN= export SLACK_CHANNEL= -## s3 Boto3 client configurations -### Access Tokens for DigitalOcean or CloudFlare, based on your cloud preference +## Settings for s3 Boto3 client +### Credentials for DigitalOcean or CloudFlare, depending on chosen cloud service export AWS_ACCESS_KEY_ID= export AWS_SECRET_ACCESS_KEY= @@ -48,7 +47,6 @@ export ENDPOINT_URL= ``` - Playbook: ```bash diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 9aec8392b..3a4c79a4c 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -6,7 +6,6 @@ import os import re -import abc from datetime import datetime import requests from dateutil.relativedelta import relativedelta @@ -26,194 +25,42 @@ def send_slack_alert(message): """ Send an alert message to a predefined Slack channel. - - Args: - message (str): The message to be sent to Slack. 
""" - # Define environment variables slack_api_token = os.environ["SLACK_API_TOKEN"] slack_channel = os.environ["SLACK_CHANNEL"] - - # Initialize Slack client slack = WebClient(token=slack_api_token) + slack.chat_postMessage(channel=slack_channel, text=message).validate() - slack.chat_postMessage( - channel=slack_channel, - text=message - ).validate() - -# Abstract class for a data sink -class AbstractSink(abc.ABC): - """ - Abstract class for defining a data sink. - """ - @abc.abstractmethod - def save(self, data_key, content): - """ - Save content to the specified key. - - Args: - data_key (str): The key where the content will be saved. - content: The content to be saved. - """ - - @abc.abstractmethod - def exists(self, data_key): - """ - Check if a file exists in the sink. - - Args: - data_key (str): The key or path to check. - - Returns: - bool: True if the file exists, False otherwise. - """ - - @abc.abstractmethod - def list_files(self): - """ - List all files in the sink. - - Returns: - set: A set of file paths. - """ - -# S3 data sink -class S3Sink(AbstractSink): - """ - S3 data sink for saving content to an S3 bucket. - """ - - def __init__(self): - """ - Initialize the S3 client. - """ - self.bucket_name = os.environ["BUCKET_NAME"] - endpoint_url = os.environ["ENDPOINT_URL"] - region_name = os.environ["REGION_NAME"] - self.s3 = boto3.client('s3', region_name=endpoint_url, endpoint_url=region_name) - - def save(self, data_key, content): - """ - Save content to the specified key in the S3 bucket. - - Args: - data_key (str): The key where the content will be saved. - content (bytes): The content to be saved. - """ - self.s3.put_object(Bucket=self.bucket_name, Key=data_key, Body=content) - - def exists(self, data_key): - """ - Check if a file exists in the S3 bucket. - - Args: - data_key (str): The key of the file to check. - - Returns: - bool: True if the file exists, False otherwise. 
- """ - try: - self.s3.head_object(Bucket=self.bucket_name, Key=data_key) - return True - except self.s3.exceptions.NoSuchKey: - return False - - def list_files(self): - """ - List all files in the S3 bucket. - - Returns: - set: A set of file paths in the bucket. - """ - s3_response = self.s3.list_objects(Bucket=self.bucket_name) - return {obj["Key"] for obj in s3_response.get('Contents', [])} - - -# Local data sink -class LocalSink(AbstractSink): +def save_to_s3(key, content): """ - Local data sink for saving files to the local file system. + Save content to S3 bucket. """ - def __init__(self, base_dir): - """ - Initialize the local sink. - - Args: - base_dir (str): Base directory where files will be mirrored. - """ - self.base_dir = base_dir - - def save(self, data_key, content): - """ - Save content to the specified key in the local filesystem. + # Retrieve S3 configuration from environment variables + bucket_name = os.environ.get("BUCKET_NAME") + endpoint_url = os.environ.get("ENDPOINT_URL") + region_name = os.environ.get("REGION_NAME") - Args: - data_key (str): The key where the content will be saved. - content (bytes): The content to be saved. - """ - full_path = os.path.join(self.base_dir, data_key) - os.makedirs(os.path.dirname(full_path), exist_ok=True) - with open(full_path, 'wb') as file: - file.write(content) + # Initialize and use S3 client + s3 = boto3.client('s3', endpoint_url=endpoint_url, region_name=region_name) + s3.put_object(Bucket=bucket_name, Key=key, Body=content) - def exists(self, data_key): - """ - Check if a file exists in the local filesystem. - Args: - data_key (str): The key of the file to check. - - Returns: - bool: True if the file exists, False otherwise. - """ - full_path = os.path.join(self.base_dir, data_key) - return os.path.exists(full_path) - - def list_files(self): - """ - List all files in the local filesystem. - - Returns: - set: A set of file paths in the base directory. 
- """ - mirrored_files = set() - for root, _, files in os.walk(self.base_dir): - for file in files: - mirrored_files.add(os.path.join(root, file)) - return mirrored_files - -# Factory method to create the appropriate sink -def create_sink(sink_type, base_dir=None, **kwargs): +def save_to_local(base_dir, key, content): """ - Create and return the appropriate sink based on the specified type. - - Args: - sink_type (str): The type of sink to create ("S3" or "Local"). - base_dir (str, optional): Base directory for LocalSink, ignored for S3Sink. - **kwargs: Additional keyword arguments specific to the sink type. - - Returns: - AbstractSink: An instance of either S3Sink or LocalSink. - - Raises: - ValueError: If an invalid sink type is provided. + Mirror Actors to local filesystem. """ - if sink_type == "S3": - return S3Sink(**kwargs) - if sink_type == "Local": - return LocalSink(base_dir or "") - raise ValueError("Invalid sink type. Please provide a valid sink type, e.g,'S3' or 'Local'") + full_path = os.path.join(base_dir, key) + os.makedirs(os.path.dirname(full_path), exist_ok=True) + with open(full_path, 'wb') as file: + file.write(content) -# Determine sink type and initialize -SINK_TYPE = os.environ.get("SINK_TYPE", "Local") +# Configuration +USE_LOCAL = os.environ.get("USE_LOCAL", "False") == "True" LOCAL_SAVE_PATH = os.environ.get("LOCAL_SAVE_PATH", ".") -sink = create_sink(SINK_TYPE, base_dir=LOCAL_SAVE_PATH) # Process GitHub releases try: releases = github.get_repo(GITHUB_REPO).get_releases() - already_mirrored = sink.list_files() for release in releases: tag_name = release.tag_name @@ -223,16 +70,17 @@ def create_sink(sink_type, base_dir=None, **kwargs): if re.match(RELEASE_PATTERN, tag_name): for asset in release.get_assets(): - release_key = f"{tag_name}/{asset.name}" - if release_key not in already_mirrored: - response = requests.get(asset.browser_download_url, timeout=30) - response.raise_for_status() - sink.save(release_key, response.content) + release = 
f"{tag_name}/{asset.name}" + response = requests.get(asset.browser_download_url, timeout=30) + response.raise_for_status() + + # Save using the appropriate sink + if USE_LOCAL: + save_to_local(LOCAL_SAVE_PATH, release, response.content) + else: + save_to_s3(release, response.content) except Exception as e: error_message = f"ā›” Filecoin Actor mirroring failed: {e}" - if SINK_TYPE == "S3": - send_slack_alert(error_message) - else: - print(error_message) + send_slack_alert(error_message) raise From be30f94f61384271edd76f284e7b65b15474433c Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 19:00:35 +0100 Subject: [PATCH 51/56] improve mirror logic Signed-off-by: samuelarogbonlo --- .../mirror-actors/mirror_actors/__main__.py | 41 +++++++++++++------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/scripts/mirror-actors/mirror_actors/__main__.py b/scripts/mirror-actors/mirror_actors/__main__.py index 3a4c79a4c..da8eaa38e 100644 --- a/scripts/mirror-actors/mirror_actors/__main__.py +++ b/scripts/mirror-actors/mirror_actors/__main__.py @@ -3,7 +3,6 @@ It supports uploading to an S3 bucket or saving locally, based on configuration. Alerts are sent to a Slack channel in case of failures. """ - import os import re from datetime import datetime @@ -31,36 +30,51 @@ def send_slack_alert(message): slack = WebClient(token=slack_api_token) slack.chat_postMessage(channel=slack_channel, text=message).validate() -def save_to_s3(key, content): +def list_s3_objects(bucket_name, endpoint_url, region_name): """ - Save content to S3 bucket. + List all objects in an S3 bucket. 
""" - # Retrieve S3 configuration from environment variables - bucket_name = os.environ.get("BUCKET_NAME") - endpoint_url = os.environ.get("ENDPOINT_URL") - region_name = os.environ.get("REGION_NAME") + s3 = boto3.client('s3', endpoint_url=endpoint_url, region_name=region_name) + s3_response = s3.list_objects_v2(Bucket=bucket_name) - # Initialize and use S3 client + if 'Contents' in s3_response and s3_response['Contents']: + return {item['Key'] for item in s3_response['Contents']} + return set() + +def save_to_s3(bucket_name, key, content, endpoint_url, region_name): + """ + Mirror Actors to S3 bucket. + """ s3 = boto3.client('s3', endpoint_url=endpoint_url, region_name=region_name) s3.put_object(Bucket=bucket_name, Key=key, Body=content) - def save_to_local(base_dir, key, content): """ Mirror Actors to local filesystem. """ full_path = os.path.join(base_dir, key) os.makedirs(os.path.dirname(full_path), exist_ok=True) - with open(full_path, 'wb') as file: - file.write(content) + with open(full_path, 'wb') as local_file: + local_file.write(content) # Configuration USE_LOCAL = os.environ.get("USE_LOCAL", "False") == "True" LOCAL_SAVE_PATH = os.environ.get("LOCAL_SAVE_PATH", ".") +BUCKET_NAME = os.environ.get("BUCKET_NAME", "") +ENDPOINT_URL = os.environ.get("ENDPOINT_URL", "") +REGION_NAME = os.environ.get("REGION_NAME", "") # Process GitHub releases try: releases = github.get_repo(GITHUB_REPO).get_releases() + already_mirrored = set() + + if USE_LOCAL: + for root, _, files in os.walk(LOCAL_SAVE_PATH): + for file in files: + already_mirrored.add(os.path.join(root, file)) + else: + already_mirrored = list_s3_objects(BUCKET_NAME, ENDPOINT_URL, REGION_NAME) for release in releases: tag_name = release.tag_name @@ -71,6 +85,9 @@ def save_to_local(base_dir, key, content): if re.match(RELEASE_PATTERN, tag_name): for asset in release.get_assets(): release = f"{tag_name}/{asset.name}" + if release in already_mirrored: + continue # Skip already mirrored assets + response = 
requests.get(asset.browser_download_url, timeout=30) response.raise_for_status() @@ -78,7 +95,7 @@ def save_to_local(base_dir, key, content): if USE_LOCAL: save_to_local(LOCAL_SAVE_PATH, release, response.content) else: - save_to_s3(release, response.content) + save_to_s3(BUCKET_NAME, release, response.content, ENDPOINT_URL, REGION_NAME) except Exception as e: error_message = f"ā›” Filecoin Actor mirroring failed: {e}" From 0858fa6311b6f896840a11aa803be31398e8671d Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 19:02:14 +0100 Subject: [PATCH 52/56] test mirror Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 09191bd6a..ea7e5f8a3 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -102,7 +102,7 @@ jobs: - name: Mirror Actors to CloudFlare working-directory: scripts/mirror-actors - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} From 9f2599acca20ab17aeedd1d50ebfa46ab3cd4f98 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 19:03:59 +0100 Subject: [PATCH 53/56] test mirror to do Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index ea7e5f8a3..6bbe21eb1 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -51,7 +51,7 @@ jobs: - name: Mirror Actors to DigitalOcean 
working-directory: scripts/mirror-actors - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} @@ -102,7 +102,7 @@ jobs: - name: Mirror Actors to CloudFlare working-directory: scripts/mirror-actors - #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} From 85fa13c61576f04fb1dc2590bbec5b5ae4023868 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Tue, 23 Jan 2024 19:05:37 +0100 Subject: [PATCH 54/56] revert test mirror Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 6bbe21eb1..09191bd6a 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -51,7 +51,7 @@ jobs: - name: Mirror Actors to DigitalOcean working-directory: scripts/mirror-actors - #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} From 0723d4c3fe2b35f21828289e5100d64da7aec7d5 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Wed, 24 Jan 2024 16:06:40 +0100 Subject: [PATCH 55/56] test deployed cf Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml 
| 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index 09191bd6a..a044ab541 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -102,13 +102,13 @@ jobs: - name: Mirror Actors to CloudFlare working-directory: scripts/mirror-actors - if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }} SLACK_CHANNEL: "#forest-notifications" BUCKET_NAME: filecoin-builtin-actors - AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_KEY }} + AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACTORS_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_ACTORS_SECRET_KEY }} REGION_NAME: "auto" ENDPOINT_URL: "https://2238a825c5aca59233eab1f221f7aefb.r2.cloudflarestorage.com" From 0189aac9bf31bbac90ee47b0543231df70361066 Mon Sep 17 00:00:00 2001 From: samuelarogbonlo Date: Wed, 24 Jan 2024 16:08:46 +0100 Subject: [PATCH 56/56] revert test Signed-off-by: samuelarogbonlo --- .github/workflows/mirror-builtin-actors.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/mirror-builtin-actors.yml b/.github/workflows/mirror-builtin-actors.yml index a044ab541..c8b54db46 100644 --- a/.github/workflows/mirror-builtin-actors.yml +++ b/.github/workflows/mirror-builtin-actors.yml @@ -102,7 +102,7 @@ jobs: - name: Mirror Actors to CloudFlare working-directory: scripts/mirror-actors - #if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') run: 
poetry run python -m mirror_actors env: SLACK_API_TOKEN: ${{ secrets.SLACK_TOKEN }}