This reverts commit 3eb3f4ed55.
Also reverts https://github.com/pytorch/pytorch/pull/144164
Manual revert because the above causes merge conflicts
Reverting in favor of https://github.com/pytorch/test-infra/pull/6159
Pull Request resolved: https://github.com/pytorch/pytorch/pull/144706
Approved by: https://github.com/janeyx99, https://github.com/atalman, https://github.com/malfet
Parent: d782e46a36
Commit: 0dca756832
@@ -68,29 +68,17 @@ s3_upload() {
   local pkg_type
   extension="$1"
   pkg_type="$2"
-  s3_key_prefix="${pkg_type}/${UPLOAD_CHANNEL}"
+  s3_root_dir="${UPLOAD_BUCKET}/${pkg_type}/${UPLOAD_CHANNEL}"
   if [[ -z ${UPLOAD_SUBFOLDER:-} ]]; then
-    s3_upload_dir="${UPLOAD_BUCKET}/${s3_key_prefix}/"
+    s3_upload_dir="${s3_root_dir}/"
   else
-    s3_key_prefix="${s3_key_prefix}/${UPLOAD_SUBFOLDER}"
-    s3_upload_dir="${UPLOAD_BUCKET}/${s3_key_prefix}/"
+    s3_upload_dir="${s3_root_dir}/${UPLOAD_SUBFOLDER}/"
   fi
   (
     for pkg in ${PKG_DIR}/*.${extension}; do
       (
         set -x
         ${AWS_S3_CP} --no-progress --acl public-read "${pkg}" "${s3_upload_dir}"
-        if [[ ${pkg_type} == "whl" ]]; then
-          dry_run_arg="--dry-run"
-          if [[ "${DRY_RUN}" = "disabled" ]]; then
-            dry_run_arg=""
-          fi
-          uv run scripts/release/upload_metadata_file.py \
-            --package "${pkg}" \
-            --bucket "${UPLOAD_BUCKET}" \
-            --key-prefix "${s3_key_prefix}" \
-            ${dry_run_arg}
-        fi
       )
     done
   )
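After the revert, the upload destination is computed once as s3_root_dir and reused by both branches. A minimal sketch of how the two cases resolve, using hypothetical bucket, channel, and subfolder values that are not taken from the diff:

    # All values below are hypothetical, for illustration only.
    UPLOAD_BUCKET="s3://pytorch"
    UPLOAD_CHANNEL="nightly"
    pkg_type="whl"
    s3_root_dir="${UPLOAD_BUCKET}/${pkg_type}/${UPLOAD_CHANNEL}"  # s3://pytorch/whl/nightly

    # UPLOAD_SUBFOLDER unset:
    #   s3_upload_dir="${s3_root_dir}/"                      => s3://pytorch/whl/nightly/
    # UPLOAD_SUBFOLDER="cu126" (hypothetical):
    #   s3_upload_dir="${s3_root_dir}/${UPLOAD_SUBFOLDER}/"  => s3://pytorch/whl/nightly/cu126/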
@@ -98,7 +86,7 @@ s3_upload() {
 
 # Install dependencies (should be a no-op if previously installed)
 conda install -yq anaconda-client
-pip install -q awscli uv
+pip install -q awscli
 
 case "${PACKAGE_TYPE}" in
   conda)
@@ -129,7 +129,6 @@ include_patterns = [
     'test/test_type_hints.py',
     'test/test_type_info.py',
     'test/test_utils.py',
-    'scripts/release/upload_metadata_file.py',
 ]
 exclude_patterns = [
     '**/fb/**',
scripts/release/upload_metadata_file.py (deleted by this revert):

@@ -1,90 +0,0 @@
-# /// script
-# requires-python = ">=3.9"
-# dependencies = [
-#     "boto3",
-# ]
-# ///
-import argparse
-import os
-import shutil
-import zipfile
-from functools import cache
-from pathlib import Path
-from typing import Any
-
-import boto3
-
-
-def parse_args() -> argparse.Namespace:
-    parser = argparse.ArgumentParser(description="Upload metadata file to S3")
-    parser.add_argument(
-        "--package", type=str, required=True, help="Path to the package"
-    )
-    parser.add_argument(
-        "--bucket", type=str, required=True, help="S3 bucket to upload metadata file to"
-    )
-    parser.add_argument(
-        "--key-prefix",
-        type=str,
-        required=True,
-        help="S3 key to upload metadata file to",
-    )
-    parser.add_argument("--dry-run", action="store_true", help="Dry run")
-    args = parser.parse_args()
-    # Sanitize the input a bit by removing s3:// prefix + trailing/leading
-    # slashes
-    if args.bucket.startswith("s3://"):
-        args.bucket = args.bucket[5:]
-    args.bucket = args.bucket.strip("/")
-    args.key_prefix = args.key_prefix.strip("/")
-    return args
-
-
-@cache
-def get_s3_client() -> Any:
-    return boto3.client("s3")
-
-
-def s3_upload(s3_bucket: str, s3_key: str, file: str, dry_run: bool) -> None:
-    s3 = get_s3_client()
-    if dry_run:
-        print(f"Dry run uploading {file} to s3://{s3_bucket}/{s3_key}")
-        return
-    s3.upload_file(
-        file,
-        s3_bucket,
-        s3_key,
-        ExtraArgs={"ChecksumAlgorithm": "sha256", "ACL": "public-read"},
-    )
-
-
-def extract_metadata(file: str) -> str:
-    # Copy the file to a temp location to extract the METADATA file
-    file_name = Path(file).name
-    tmp = "/tmp"
-    shutil.copy(file, tmp)
-    zip_file = f"{tmp}/{file_name.replace('.whl', '.zip')}"
-    shutil.move(f"{tmp}/{file_name}", zip_file)
-
-    with zipfile.ZipFile(zip_file, "r") as zip_ref:
-        for filename in zip_ref.infolist():
-            if filename.filename.endswith(".dist-info/METADATA"):
-                filename.filename = "METADATA"
-                if os.path.exists(f"{tmp}/METADATA"):
-                    os.remove(f"{tmp}/METADATA")
-                zip_ref.extract(filename, tmp)
-    return tmp
-
-
-if __name__ == "__main__":
-    # https://peps.python.org/pep-0658/
-    # Upload the METADATA file to S3
-    args = parse_args()
-    location = extract_metadata(args.package)
-    metadata_file = f"{location}/METADATA"
-    s3_upload(
-        args.bucket,
-        f"{args.key_prefix}/{Path(args.package).name}.metadata",
-        metadata_file,
-        args.dry_run,
-    )
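For context, the deleted helper implemented PEP 658-style metadata publishing: it extracted .dist-info/METADATA from a wheel and uploaded it beside the wheel as <wheel name>.metadata. A sketch of how the removed shell block above invoked it, with a hypothetical wheel name, bucket, and key prefix:

    # Hypothetical invocation mirroring the removed shell lines above.
    uv run scripts/release/upload_metadata_file.py \
        --package torch-2.6.0-cp312-cp312-manylinux_2_28_x86_64.whl \
        --bucket pytorch \
        --key-prefix whl/nightly \
        --dry-run
    # With --dry-run the script only prints its plan (per its print statement):
    #   Dry run uploading /tmp/METADATA to s3://pytorch/whl/nightly/torch-2.6.0-cp312-cp312-manylinux_2_28_x86_64.whl.metadata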