mirror of
https://github.com/saymrwulf/pytorch.git
synced 2026-05-14 20:57:59 +00:00
.circleci: Add simple backup and restore solution for RCs (#38690)
Summary: * Does a basic upload of release candidates to an extra folder within our S3 bucket. * Refactors AWS promotion to allow for easier development of restoration of backups Backup restoration usage: ``` RESTORE_FROM=v1.6.0-rc3 restore-backup.sh ``` Requires: * AWS credentials to upload / download stuff * Anaconda credentials to upload Pull Request resolved: https://github.com/pytorch/pytorch/pull/38690 Differential Revision: D21691033 Pulled By: seemethere fbshipit-source-id: 31118814db1ca701c55a3cb0bc32caa1e77a833d
This commit is contained in:
parent
481838f21b
commit
5dd65ba634
9 changed files with 188 additions and 46 deletions
|
|
@ -18,20 +18,32 @@ PIP_UPLOAD_FOLDER=${PIP_UPLOAD_FOLDER:-nightly}
|
|||
# The only difference is the trailing slash
|
||||
# Strip trailing slashes if there
|
||||
CONDA_UPLOAD_CHANNEL=$(echo "${PIP_UPLOAD_FOLDER}" | sed 's:/*$::')
|
||||
BACKUP_BUCKET="s3://pytorch-backup"
|
||||
|
||||
# Upload the package to the final location
|
||||
pushd /home/circleci/project/final_pkgs
|
||||
if [[ "$PACKAGE_TYPE" == conda ]]; then
|
||||
retry conda install -yq anaconda-client
|
||||
retry anaconda -t "${CONDA_PYTORCHBOT_TOKEN}" upload "$(ls)" -u "pytorch-${CONDA_UPLOAD_CHANNEL}" --label main --no-progress --force
|
||||
# Fetch platform (eg. win-64, linux-64, etc.) from index file
|
||||
# Because there's no actual conda command to read this
|
||||
subdir=$(tar -xOf ./*.bz2 info/index.json | grep subdir | cut -d ':' -f2 | sed -e 's/[[:space:]]//' -e 's/"//g' -e 's/,//')
|
||||
BACKUP_DIR="conda/${subdir}"
|
||||
elif [[ "$PACKAGE_TYPE" == libtorch ]]; then
|
||||
retry pip install -q awscli
|
||||
s3_dir="s3://pytorch/libtorch/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
for pkg in $(ls); do
|
||||
retry aws s3 cp "$pkg" "$s3_dir" --acl public-read
|
||||
done
|
||||
BACKUP_DIR="libtorch/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
else
|
||||
retry pip install -q awscli
|
||||
s3_dir="s3://pytorch/whl/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
retry aws s3 cp "$(ls)" "$s3_dir" --acl public-read
|
||||
BACKUP_DIR="whl/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
fi
|
||||
|
||||
if [[ -n "${CIRCLE_TAG:-}" ]]; then
|
||||
s3_dir="${BACKUP_BUCKET}/${CIRCLE_TAG}/${BACKUP_DIR}"
|
||||
retry aws s3 cp . "$s3_dir"
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -19,19 +19,31 @@ PIP_UPLOAD_FOLDER=${PIP_UPLOAD_FOLDER:-nightly}
|
|||
# The only difference is the trailing slash
|
||||
# Strip trailing slashes if there
|
||||
CONDA_UPLOAD_CHANNEL=$(echo "${PIP_UPLOAD_FOLDER}" | sed 's:/*$::')
|
||||
BACKUP_BUCKET="s3://pytorch-backup"
|
||||
|
||||
pushd "$workdir/final_pkgs"
|
||||
if [[ "$PACKAGE_TYPE" == conda ]]; then
|
||||
retry conda install -yq anaconda-client
|
||||
retry anaconda -t "${CONDA_PYTORCHBOT_TOKEN}" upload "$(ls)" -u "pytorch-${CONDA_UPLOAD_CHANNEL}" --label main --no-progress --force
|
||||
# Fetch platform (eg. win-64, linux-64, etc.) from index file
|
||||
# Because there's no actual conda command to read this
|
||||
subdir=$(tar -xOf ./*.bz2 info/index.json | grep subdir | cut -d ':' -f2 | sed -e 's/[[:space:]]//' -e 's/"//g' -e 's/,//')
|
||||
BACKUP_DIR="conda/${subdir}"
|
||||
elif [[ "$PACKAGE_TYPE" == libtorch ]]; then
|
||||
retry pip install -q awscli
|
||||
s3_dir="s3://pytorch/libtorch/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
for pkg in $(ls); do
|
||||
retry aws s3 cp "$pkg" "$s3_dir" --acl public-read
|
||||
done
|
||||
BACKUP_DIR="libtorch/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
else
|
||||
retry pip install -q awscli
|
||||
s3_dir="s3://pytorch/whl/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
retry aws s3 cp "$(ls)" "$s3_dir" --acl public-read
|
||||
BACKUP_DIR="whl/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
fi
|
||||
|
||||
if [[ -n "${CIRCLE_TAG:-}" ]]; then
|
||||
s3_dir="${BACKUP_BUCKET}/${CIRCLE_TAG}/${BACKUP_DIR}"
|
||||
retry aws s3 cp . "$s3_dir"
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -17,21 +17,32 @@ PIP_UPLOAD_FOLDER=${PIP_UPLOAD_FOLDER:-nightly/}
|
|||
# The only difference is the trailing slash
|
||||
# Strip trailing slashes if there
|
||||
CONDA_UPLOAD_CHANNEL=$(echo "${PIP_UPLOAD_FOLDER}" | sed 's:/*$::')
|
||||
BACKUP_BUCKET="s3://pytorch-backup"
|
||||
|
||||
pushd /root/workspace/final_pkgs
|
||||
# Upload the package to the final location
|
||||
if [[ "$PACKAGE_TYPE" == conda ]]; then
|
||||
retry conda install -yq anaconda-client
|
||||
retry anaconda -t "${CONDA_PYTORCHBOT_TOKEN}" upload "$(ls)" -u "pytorch-${CONDA_UPLOAD_CHANNEL}" --label main --no-progress --force
|
||||
# Fetch platform (eg. win-64, linux-64, etc.) from index file
|
||||
# Because there's no actual conda command to read this
|
||||
subdir=$(tar -xOf ./*.bz2 info/index.json | grep subdir | cut -d ':' -f2 | sed -e 's/[[:space:]]//' -e 's/"//g' -e 's/,//')
|
||||
BACKUP_DIR="conda/${subdir}"
|
||||
elif [[ "$PACKAGE_TYPE" == libtorch ]]; then
|
||||
retry conda install -c conda-forge -yq awscli
|
||||
s3_dir="s3://pytorch/libtorch/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
for pkg in $(ls); do
|
||||
retry aws s3 cp "$pkg" "$s3_dir" --acl public-read
|
||||
done
|
||||
BACKUP_DIR="libtorch/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
else
|
||||
retry conda install -c conda-forge -yq awscli
|
||||
s3_dir="s3://pytorch/whl/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
retry aws s3 cp "$(ls)" "$s3_dir" --acl public-read
|
||||
BACKUP_DIR="whl/${PIP_UPLOAD_FOLDER}${DESIRED_CUDA}/"
|
||||
fi
|
||||
|
||||
if [[ -n "${CIRCLE_TAG:-}" ]]; then
|
||||
s3_dir="${BACKUP_BUCKET}/${CIRCLE_TAG}/${BACKUP_DIR}"
|
||||
retry aws s3 cp . "$s3_dir"
|
||||
fi
|
||||
|
|
|
|||
60
scripts/release/README.md
Normal file
60
scripts/release/README.md
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
# PyTorch Release Scripts
|
||||
|
||||
These are a collection of scripts that are to be used for release activities.
|
||||
|
||||
> NOTE: All scripts should do no actual work unless the `DRY_RUN` environment variable is set
|
||||
> to `disabled`.
|
||||
> The basic idea being that there should be no potential to do anything dangerous unless
|
||||
> `DRY_RUN` is explicitly set to `disabled`.
|
||||
|
||||
## Requirements to actually run these scripts
|
||||
* AWS access to pytorch account
|
||||
* Access to upload conda packages to the `pytorch` conda channel
|
||||
* Access to the PyPI repositories
|
||||
|
||||
|
||||
## Promote
|
||||
|
||||
These are scripts related to promotion of release candidates to GA channels, these
|
||||
can actually be used to promote pytorch, libtorch, and related domain libraries.
|
||||
|
||||
### Usage
|
||||
|
||||
Usage should be fairly straightforward and should actually require no extra variables
|
||||
if you are running from the correct git tags. (i.e. the GA tag to promote is currently
|
||||
checked out)
|
||||
|
||||
`PACKAGE_TYPE` and `PACKAGE_NAME` can be swapped out to promote other packages.
|
||||
|
||||
#### Promoting pytorch wheels
|
||||
```bash
|
||||
promote/s3_to_s3.sh
|
||||
```
|
||||
|
||||
#### Promoting libtorch archives
|
||||
```bash
|
||||
PACKAGE_TYPE=libtorch PACKAGE_NAME=libtorch promote/s3_to_s3.sh
|
||||
```
|
||||
|
||||
#### Promoting conda packages
|
||||
```bash
|
||||
promote/conda_to_conda.sh
|
||||
```
|
||||
|
||||
#### Promoting wheels to PyPI
|
||||
**WARNING**: These can only be run once and cannot be undone, run with caution
|
||||
```bash
|
||||
promote/wheel_to_pypi.sh
|
||||
```
|
||||
|
||||
## Restoring backups
|
||||
|
||||
All release candidates are currently backed up to `s3://pytorch-backup/${TAG_NAME}` and
|
||||
can be restored to the test channels with the `restore-backup.sh` script.
|
||||
|
||||
Which backup to restore from is dictated by the `RESTORE_FROM` environment variable.
|
||||
|
||||
### Usage
|
||||
```bash
|
||||
RESTORE_FROM=v1.5.0-rc5 ./restore-backup.sh
|
||||
```
|
||||
|
|
@ -36,17 +36,8 @@ get_pytorch_version() {
|
|||
}
|
||||
|
||||
aws_promote() {
|
||||
package_type=$1
|
||||
package_name=$2
|
||||
package_name=$1
|
||||
pytorch_version=$(get_pytorch_version)
|
||||
PYTORCH_S3_FROM=${PYTORCH_S3_FROM:-test}
|
||||
PYTORCH_S3_TO=${PYTORCH_S3_TO:-}
|
||||
if [[ -n "${PYTORCH_S3_TO}" ]]; then
|
||||
# Add a trailing slash so that it'll go to the correct subdir instead of creating a prefix+file thing
|
||||
PYTORCH_S3_TO="${PYTORCH_S3_TO}/"
|
||||
fi
|
||||
# Can be changed
|
||||
PYTORCH_S3_BUCKET=${PYTORCH_S3_BUCKET:-"s3://pytorch"}
|
||||
# Dry run by default
|
||||
DRY_RUN=${DRY_RUN:-enabled}
|
||||
DRY_RUN_FLAG="--dryrun"
|
||||
|
|
@ -54,22 +45,16 @@ aws_promote() {
|
|||
DRY_RUN_FLAG=""
|
||||
fi
|
||||
AWS=${AWS:-aws}
|
||||
while IFS=$'\n' read -r s3_ls_result; do
|
||||
# File should be the last field
|
||||
from=$(echo "${s3_ls_result}" | rev | cut -d' ' -f 1 | rev)
|
||||
to=${from//${PYTORCH_S3_FROM}\//${PYTORCH_S3_TO}}
|
||||
(
|
||||
set -x
|
||||
${AWS} s3 cp ${DRY_RUN_FLAG} \
|
||||
--only-show-errors \
|
||||
--acl public-read \
|
||||
"${PYTORCH_S3_BUCKET}/${from}" \
|
||||
"${PYTORCH_S3_BUCKET}/${to}"
|
||||
)
|
||||
done < <(\
|
||||
aws s3 ls --recursive "${PYTORCH_S3_BUCKET}/${package_type}/${PYTORCH_S3_FROM}" \
|
||||
| grep -E "${package_name}-.*${pytorch_version}" \
|
||||
| sed -e '/dev/d' \
|
||||
(
|
||||
set -x
|
||||
${AWS} s3 cp ${DRY_RUN_FLAG} \
|
||||
--only-show-errors \
|
||||
--acl public-read \
|
||||
--recursive \
|
||||
--exclude '*' \
|
||||
--include "*${package_name}-${pytorch_version}*" \
|
||||
"${PYTORCH_S3_FROM/\/$//}" \
|
||||
"${PYTORCH_S3_TO/\/$//}"
|
||||
)
|
||||
# ^ We grep for package_name-.*pytorch_version to avoid any situations where domain libraries have
|
||||
# the same version on our S3 buckets
|
||||
|
|
|
|||
|
|
@ -1,8 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eou pipefail
|
||||
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
source "${DIR}/common_utils.sh"
|
||||
|
||||
aws_promote libtorch libtorch
|
||||
19
scripts/release/promote/s3_to_s3.sh
Executable file
19
scripts/release/promote/s3_to_s3.sh
Executable file
|
|
@ -0,0 +1,19 @@
|
|||
#!/usr/bin/env bash
# Promote S3-hosted release artifacts (pytorch wheels by default) from one
# channel to another, e.g. test -> release. Does nothing destructive unless
# DRY_RUN=disabled (see common_utils.sh / README.md).

set -euo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "${DIR}/common_utils.sh"

# Overridable so domain libraries (torchvision, etc.) can reuse this script.
: "${PACKAGE_NAME:=torch}"
: "${PACKAGE_TYPE:=whl}"

# Source / destination S3 locations; each component can be overridden
# individually, or the full PYTORCH_S3_FROM / PYTORCH_S3_TO paths directly.
: "${PYTORCH_S3_BUCKET:=s3://pytorch}"
: "${FROM:=test}"
: "${PYTORCH_S3_FROM:=${PYTORCH_S3_BUCKET}/${PACKAGE_TYPE}/${FROM}}"
: "${TO:=}"
: "${PYTORCH_S3_TO:=${PYTORCH_S3_BUCKET}/${PACKAGE_TYPE}/${TO}}"

aws_promote "${PACKAGE_NAME}"
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eou pipefail
|
||||
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
source "${DIR}/common_utils.sh"
|
||||
|
||||
# Allow for users to pass PACKAGE_NAME
|
||||
# For use with other packages, i.e. torchvision, etc.
|
||||
PACKAGE_NAME=${PACKAGE_NAME:-torch}
|
||||
|
||||
aws_promote whl "${PACKAGE_NAME}"
|
||||
63
scripts/release/restore-backup.sh
Executable file
63
scripts/release/restore-backup.sh
Executable file
|
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env bash
# Restore a release-candidate backup (wheels, libtorch archives, conda
# packages) from s3://pytorch-backup/${RESTORE_FROM} to the test channels.
# Dry-run by default; set DRY_RUN=disabled to actually upload.

set -euo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "${DIR}/promote/common_utils.sh"

if [[ -z "${RESTORE_FROM:-}" ]]; then
  # Diagnostics belong on stderr so they don't pollute captured stdout.
  echo "ERROR: RESTORE_FROM environment variable must be specified" >&2
  echo "  example: RESTORE_FROM=v1.6.0-rc3 ${0}" >&2
  exit 1
fi

DRY_RUN=${DRY_RUN:-enabled}

PYTORCH_S3_BACKUP_BUCKET=${PYTORCH_S3_BACKUP_BUCKET:-s3://pytorch-backup/${RESTORE_FROM}}
PYTORCH_S3_TEST_BUCKET=${PYTORCH_S3_TEST_BUCKET:-s3://pytorch/}
PYTORCH_S3_FROM=${PYTORCH_S3_FROM:-${PYTORCH_S3_BACKUP_BUCKET}}
PYTORCH_S3_TO=${PYTORCH_S3_TO:-s3://pytorch/}

# NOTE(review): these pass two arguments; the refactored aws_promote in
# promote/common_utils.sh appears to take only package_name — confirm the
# second argument is still consumed.
restore_wheels() {
  aws_promote torch whl
}

restore_libtorch() {
  # Quoted: 'libtorch-*' is a match pattern for aws_promote, not a shell
  # glob — unquoted it would expand against files in the current directory.
  aws_promote 'libtorch-*' libtorch
}

# Dry-run trick: prefixing with "true" turns the anaconda invocation into a
# no-op while still echoing it under `set -x`.
ANACONDA="true anaconda"
if [[ ${DRY_RUN} = "disabled" ]]; then
  ANACONDA="anaconda"
fi
PYTORCH_CONDA_TO=${PYTORCH_CONDA_TO:-pytorch-test}

# Upload a single conda package file to the target channel.
# Arguments: $1 - path to the .bz2 package
upload_conda() {
  local pkg
  pkg=${1}
  (
    set -x
    ${ANACONDA} upload --skip -u "${PYTORCH_CONDA_TO}" "${pkg}"
  )
}

# Exported so the parallel bash workers spawned by xargs can call it.
export -f upload_conda

restore_conda() {
  TMP_DIR="$(mktemp -d)"
  # Quote the expansion inside the trap so cleanup survives paths with spaces.
  trap 'rm -rf "${TMP_DIR}"' EXIT
  (
    set -x
    aws s3 cp --recursive "${PYTORCH_S3_BACKUP_BUCKET}/conda" "${TMP_DIR}/"
  )
  export ANACONDA
  export PYTORCH_CONDA_TO
  # Upload all bz2 packages in parallel for quick restoration. NUL-delimited
  # handoff plus "$1" positional passing keeps arbitrary filenames safe
  # (the old `bash -c "... %"` interpolated the path into the command string).
  find "${TMP_DIR}" -name '*.bz2' -type f -print0 \
    | xargs -0 -P 10 -I {} bash -c 'upload_conda "$1"' _ {}
}


restore_wheels
restore_libtorch
restore_conda
|
||||
Loading…
Reference in a new issue