Update python version metadata (remove 3.7, 3.8, 3.9; add 3.13). (#23067)

### Description

* Update python version metadata to be in sync with latest python
packages (onnxruntime, onnxruntime-gpu and onnxruntime-qnn).
* Update black format target-version to 3.10, and use lintrunner to
format all files.
* Update the lintrunner installation command line to be consistent.
* Include `requirements-lintrunner.txt` in `requirements-dev.txt` to
avoid duplicated settings.

### Motivation and Context

https://github.com/microsoft/onnxruntime/issues/22993

Python support by numpy:
https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule
```
On Apr 05, 2024 drop support for Python 3.9
On Apr 04, 2025 drop support for Python 3.10
```
This commit is contained in:
Tianlei Wu 2024-12-17 10:59:20 -08:00 committed by GitHub
parent 0981bbf4ca
commit 5afab787db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 25 additions and 39 deletions

View file

@@ -45,7 +45,7 @@ jobs:
- name: Setup Python - name: Setup Python
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
# Version range or exact version of Python to use, using SemVer's version range syntax. Reads from .python-version if unset. # Use the version configured in target-version of [tool.black] section in pyproject.toml.
python-version: "3.10" python-version: "3.10"
- name: Setup Rust - name: Setup Rust
uses: actions-rs/toolchain@v1 uses: actions-rs/toolchain@v1
@@ -55,12 +55,10 @@ jobs:
- name: Update PATH - name: Update PATH
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
- name: Install dependencies - name: Install dependencies
run: | run: |
set -e -x set -e -x
python -m pip install --user -r requirements-dev.txt python -m pip install --user -r requirements-dev.txt
python -m pip install --user lintrunner lintrunner-adapters
lintrunner init lintrunner init
- name: Run lintrunner on all files - name: Run lintrunner on all files
run: | run: |

View file

@@ -2,31 +2,23 @@
# You can install the dependencies and initialize with # You can install the dependencies and initialize with
# #
# ```sh # ```sh
# pip install lintrunner lintrunner-adapters # pip install -r requirements-lintrunner.txt
# lintrunner init # lintrunner init
# ``` # ```
# #
# This will install lintrunner on your system and download all the necessary # This will install lintrunner on your system and download all the necessary
# dependencies to run linters locally. # dependencies to run linters locally.
# If you want to see what lintrunner init will install, run
# `lintrunner init --dry-run`.
# #
# To lint local changes: # To format local changes:
# #
# ```bash # ```bash
# lintrunner # lintrunner -a
# ``` # ```
# #
# To lint all files: # To format all files:
# #
# ```bash # ```bash
# lintrunner --all-files # lintrunner -a --all-files
# ```
#
# To format files:
#
# ```bash
# lintrunner f --all-files
# ``` # ```
# #
# To read more about lintrunner, see [wiki](https://github.com/pytorch/pytorch/wiki/lintrunner). # To read more about lintrunner, see [wiki](https://github.com/pytorch/pytorch/wiki/lintrunner).

View file

@@ -164,22 +164,16 @@ dependencies to run linters locally.
If you want to see what lintrunner init will install, run If you want to see what lintrunner init will install, run
`lintrunner init --dry-run`. `lintrunner init --dry-run`.
To lint local changes: To format local changes:
```bash
lintrunner
```
To format files and apply suggestions:
```bash ```bash
lintrunner -a lintrunner -a
``` ```
To lint all files: To format all files:
```bash ```bash
lintrunner --all-files lintrunner -a --all-files
``` ```
To show help text: To show help text:

View file

@@ -185,10 +185,13 @@ def generate_artifacts(
logging.info("Custom op library provided: %s", custom_op_library) logging.info("Custom op library provided: %s", custom_op_library)
custom_op_library_path = pathlib.Path(custom_op_library) custom_op_library_path = pathlib.Path(custom_op_library)
with onnxblock.base(loaded_model, model_path), ( with (
onnxblock.custom_op_library(custom_op_library_path) onnxblock.base(loaded_model, model_path),
if custom_op_library is not None (
else contextlib.nullcontext() onnxblock.custom_op_library(custom_op_library_path)
if custom_op_library is not None
else contextlib.nullcontext()
),
): ):
_ = training_block(*[output.name for output in loaded_model.graph.output]) _ = training_block(*[output.name for output in loaded_model.graph.output])
training_model, eval_model = training_block.to_model_proto() training_model, eval_model = training_block.to_model_proto()

View file

@@ -867,8 +867,9 @@ class GraphTransitionManager:
assert model_info_for_export.export_mode is not None, "Please use a concrete instance of ExecutionManager" assert model_info_for_export.export_mode is not None, "Please use a concrete instance of ExecutionManager"
try: try:
with torch.no_grad(), stage3_export_context( with (
enable_zero_stage3_support, stage3_param_handle, flattened_module torch.no_grad(),
stage3_export_context(enable_zero_stage3_support, stage3_param_handle, flattened_module),
): ):
required_export_kwargs = { required_export_kwargs = {
"input_names": model_info_for_export.onnx_graph_input_names, # did not contains parameters as its input yet "input_names": model_info_for_export.onnx_graph_input_names, # did not contains parameters as its input yet

View file

@@ -2,7 +2,8 @@
line-length = 120 line-length = 120
# NOTE: Do not extend the exclude list. Edit .lintrunner.toml instead # NOTE: Do not extend the exclude list. Edit .lintrunner.toml instead
extend-exclude = "cmake|onnxruntime/core/flatbuffers/" extend-exclude = "cmake|onnxruntime/core/flatbuffers/"
target-version = ["py37", "py38", "py39", "py310", "py311"] # NOTE: use the minimum supported python version as target-version
target-version = ["py310"]
[tool.isort] [tool.isort]
# NOTE: Do not extend the exclude list. Edit .lintrunner.toml instead # NOTE: Do not extend the exclude list. Edit .lintrunner.toml instead

View file

@@ -1,7 +1,6 @@
black>=22.3 -r requirements-lintrunner.txt
cerberus cerberus
flatbuffers flatbuffers
isort
jinja2 jinja2
numpy numpy
onnx onnx

View file

@@ -529,6 +529,8 @@ classifiers = [
"Intended Audience :: Developers", "Intended Audience :: Developers",
"License :: OSI Approved :: MIT License", "License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux", "Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Mathematics", "Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Scientific/Engineering :: Artificial Intelligence",
@@ -537,14 +539,10 @@ classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Libraries :: Python Modules",
"Programming Language :: Python", "Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.12",
"Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 3.13",
"Operating System :: MacOS",
] ]
if enable_training or enable_training_apis: if enable_training or enable_training_apis: