Create the Nuget WindowsAI Pipeline (#3684)

* add windowsai.yml for new Microsoft.AI.MachineLearning nuget

* temporarily add windowsai.yml to gpu.yml

* pass in build arch

* remove install onnx task

* no dml for arm or arm64

* refactor nuget pipeline defs

* update package creation

* pass in build and sources path

* missing hyphens

* copy license file

* fix parameter variable

* disable arm builds for now

* remove commented script block

* download pipeline artifact name update

* set working dir

* Add bundling nuget script

* path combine

* null path

* combine needs parentheses

* binplace microsoft.* dlls in new nuget package

* update artifact name

* move merged nuget to artifacts directory

* move to merged subfolder in artifacts staging dir

* forward slash to back

* enable arm

* vcvarsall needs x64 vars setup

* Run Tests

* fix tests

* move global variables

* update yml to not have global variable in template

* removed parameters

* fixes

* Add build arch as an env variable

* ne not neq

* %Var% for batch script

* dont pass argument for x64

* disable arm tests

* skip csharp/cxx tests for microsoft nuget package

* remove test-win as it tests only c# cxx and capi

* test build for store apps

* dont build for store

* tools/nuget/generate_nuspec_for_native_nuget.py

* remove args.

* add new props and targets for microsoft.ai

* make windowsai props/targets static

* add dependency

* dont ship dot net props

* Remove c# from windowsai nuget

* copy license file

* native packages must have win10 as the platform, not win

* cuda header in wrong if branch

* no dml for arm builds

* only build dml for x64/ x86

* User/sheilk/props update (#3616)

* prelim store work

* props

* Fix desktop nuget props/targets

* clean up targets and make store apps work

Co-authored-by: Sheil Kumar <sheilk@microsoft.com>

* update windowsai.yml with latest

* remove extra dloadhelpers

* Add abi headers to abi dir, and reference native includes

* update windowsai.yml

* minor update

* remove parameters

* add doesrp param

* hard code esrp to true

* add directml for x86/x64

* revert gpu yml changes

* add store builds

* add store builds

* add checks again in old way

* dup job names for store and desktop builds

* move all of the runtime binaries to win10 folder

* only set safeseh on x86

* disable the store builds for now... missing msvcprt.lib

* copy paste deletion...

* switch back to win- (#3646)

Co-authored-by: Sheil Kumar <sheilk@microsoft.com>

* use stahlworks

* & not supported in ado

* add cuda to cpu nuget(???) and EnableDelayedExpansion to enable x86 dml package

* revert nocontribops

* add underscore...

* extra win/win10 change

* merged nuget... still not being bundled...

* files in merged directory

* missing parens causing dml to be included in cpu package

* more diagnostic info

* switch dir to get-childitem

* wait for compression to complete

* add winml_adapter to mkml and gpu packages

* enable_wcos

* add mklml binaries

* props and targets missing from mklml

Co-authored-by: Sheil Kumar <sheilk@microsoft.com>
This commit is contained in:
Sheil Kumar 2020-04-24 20:20:04 -07:00 committed by GitHub
parent 1b7bf481fc
commit a475f2824d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 646 additions and 185 deletions

View file

@ -946,3 +946,16 @@ if (onnxruntime_BUILD_CSHARP)
# set_property(GLOBAL PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "netstandard2.0")
include(onnxruntime_csharp.cmake)
endif()
# Extra MSVC settings required when targeting Windows Store (UWP/AppContainer) apps.
if (onnxruntime_BUILD_FOR_WINDOWS_STORE)
# Mark both shipping DLL projects as AppContainer applications in the generated VS projects.
set_target_properties(onnxruntime PROPERTIES VS_GLOBAL_AppContainerApplication true)
set_target_properties(winml_dll PROPERTIES VS_GLOBAL_AppContainerApplication true)
# Store binaries must link with ASLR (/DYNAMICBASE), DEP (/NXCOMPAT) and the
# AppContainer isolation flag (/APPCONTAINER).
target_link_options(onnxruntime PRIVATE /DYNAMICBASE /NXCOMPAT /APPCONTAINER)
target_link_options(winml_dll PRIVATE /DYNAMICBASE /NXCOMPAT /APPCONTAINER)
# /SAFESEH (safe structured exception handler table) is an x86-only linker switch;
# passing it on other architectures is a link error, hence the platform guard.
if (onnxruntime_target_platform STREQUAL "x86")
target_link_options(onnxruntime PRIVATE /SAFESEH)
target_link_options(winml_dll PRIVATE /SAFESEH)
endif()
endif()

View file

@ -430,7 +430,14 @@ if (onnxruntime_USE_DML)
endfunction()
target_add_dml(onnxruntime_providers_dml)
target_link_libraries(onnxruntime_providers_dml PRIVATE d3d12.lib dxgi.lib delayimp.lib)
target_link_libraries(onnxruntime_providers_dml PRIVATE d3d12.lib dxgi.lib)
if (onnxruntime_BUILD_FOR_WINDOWS_STORE)
target_link_libraries(onnxruntime_providers_dml PRIVATE dloadhelper.lib)
else()
target_link_libraries(onnxruntime_providers_dml PRIVATE delayimp.lib)
endif()
set(onnxruntime_DELAYLOAD_FLAGS "${onnxruntime_DELAYLOAD_FLAGS} /DELAYLOAD:DirectML.dll /DELAYLOAD:d3d12.dll /DELAYLOAD:dxgi.dll")
# The DML EP requires C++17

View file

@ -615,7 +615,11 @@ target_link_libraries(winml_dll PRIVATE winml_lib_api)
target_link_libraries(winml_dll PRIVATE winml_lib_image)
target_link_libraries(winml_dll PRIVATE winml_lib_ort)
target_link_libraries(winml_dll PRIVATE winml_lib_telemetry)
target_link_libraries(winml_dll PRIVATE delayimp.lib)
if (onnxruntime_BUILD_FOR_WINDOWS_STORE)
target_link_libraries(winml_dll PRIVATE dloadhelper.lib)
else()
target_link_libraries(winml_dll PRIVATE delayimp.lib)
endif()
# Any project that links in debug_alloc.obj needs this lib.
# unresolved external symbol __imp_SymSetOptions

View file

@ -72,7 +72,7 @@ CMake creates a target to this project
</Exec>
</Target>
<Target Name="ObtainPackageVersion" BeforeTargets="CreatePackage">
<Target Name="ObtainPackageVersion" BeforeTargets="CreatePackage;CreateWindowsAIPackage">
<ReadLinesFromFile File="..\VERSION_NUMBER">
<Output TaskParameter="Lines" ItemName="MajorVersionNumber"/>
</ReadLinesFromFile>
@ -119,8 +119,24 @@ CMake creates a target to this project
<Exec ContinueOnError="False" Command="$(NugetExe) pack NativeNuget.nuspec" ConsoleToMSBuild="true" WorkingDirectory="$(NativeBuildOutputDirAbs)">
<Output TaskParameter="ConsoleOutput" PropertyName="OutputOfExec" />
</Exec>
</Target>
<ItemGroup>
<LicenseFile Include="$(OnnxRuntimeSourceDirectory)\LICENSE"/>
</ItemGroup>
<Target Name="CreateWindowsAIPackage">
<Copy SourceFiles="@(LicenseFile)" DestinationFiles="@(LicenseFile->'$(OnnxRuntimeSourceDirectory)\%(Filename).txt')"/>
<Message Importance="High" Text="Generating nuspec for the Microsoft.AI.MachineLearning Nuget package ..." />
<Exec ContinueOnError="False" Command="python ..\tools\nuget\generate_nuspec_for_native_nuget.py --package_version $(PackageVersion) --package_name Microsoft.AI.MachineLearning --target_architecture $(TargetArchitecture) --build_config $(Configuration) --native_build_path $(NativeBuildOutputDirAbs) --packages_path $(OnnxRuntimePackagesDirectoryAbs) --ort_build_path $(OnnxRuntimeBuildDirectoryAbs) --sources_path $(OnnxRuntimeSourceDirectoryAbs) --commit_id $(GitCommitHash) --is_release_build $(IsReleaseBuild)" ConsoleToMSBuild="true">
<Output TaskParameter="ConsoleOutput" PropertyName="GenerateNuspecOutput" />
</Exec>
<Message Importance="High" Text="Bundling native shared library artifacts into a NuGet package ..." />
<Exec ContinueOnError="False" Command="$(NugetExe) pack NativeNuget.nuspec" ConsoleToMSBuild="true" WorkingDirectory="$(NativeBuildOutputDirAbs)">
<Output TaskParameter="ConsoleOutput" PropertyName="OutputOfExec" />
</Exec>
</Target>
</Project>

View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- NuGet props for the Microsoft.AI.MachineLearning package: adds the package's
     native header directory (build/native/include, relative to this props file
     under build/<tfm>/) to the C++ compiler and resource-compiler include paths
     of every consuming project. -->
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)../../build/native/include/;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ClCompile>
<ResourceCompile>
<AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)../../build/native/include/;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
</ItemDefinitionGroup>
</Project>

View file

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- NuGet targets for the Microsoft.AI.MachineLearning package: resolves the
     native binaries for the consuming project's platform, references the WinMD,
     and binplaces the DLLs into the consumer's output directory. -->
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <!-- Map the VS $(Platform) value to the architecture folder name used inside the
       package: VS uses 'Win32' for 32-bit builds, but the runtimes folders use 'x86'. -->
  <PropertyGroup>
    <WindowsAI-Platform Condition="'$(Platform)' == 'Win32'">x86</WindowsAI-Platform>
    <WindowsAI-Platform Condition="'$(Platform)' != 'Win32'">$(Platform)</WindowsAI-Platform>
  </PropertyGroup>
  <!-- Full paths to the native binaries shipped under runtimes\win-<arch>\native. -->
  <PropertyGroup>
    <WindowsAIBinary>$(MSBuildThisFileDirectory)..\..\runtimes\win-$(WindowsAI-Platform)\native\Microsoft.AI.MachineLearning.dll</WindowsAIBinary>
    <OnnxRuntimeBinary>$(MSBuildThisFileDirectory)..\..\runtimes\win-$(WindowsAI-Platform)\native\onnxruntime.dll</OnnxRuntimeBinary>
    <DirectMLBinary>$(MSBuildThisFileDirectory)..\..\runtimes\win-$(WindowsAI-Platform)\native\directml.dll</DirectMLBinary>
  </PropertyGroup>
  <!-- Reference the projected WinMD, backed by the Microsoft.AI.MachineLearning DLL. -->
  <ItemGroup>
    <Reference Include="$(MSBuildThisFileDirectory)\..\..\lib\uap10.0\Microsoft.AI.MachineLearning.winmd">
      <Implementation>$(WindowsAIBinary)</Implementation>
    </Reference>
  </ItemGroup>
  <!-- directml.dll is only shipped for x64/x86 (see pipeline); arm/arm64 get CPU-only
       binaries. String literals in MSBuild conditions are quoted, consistent with the
       '$(Platform)' == 'Win32' condition above (the original compared against bare
       x64/x86/arm/arm64 tokens). -->
  <PropertyGroup Condition="'$(WindowsAI-Platform)' == 'x64' OR '$(WindowsAI-Platform)' == 'x86'">
    <MLBinaries>$(WindowsAIBinary);$(OnnxRuntimeBinary);$(DirectMLBinary)</MLBinaries>
  </PropertyGroup>
  <PropertyGroup Condition="'$(WindowsAI-Platform)' == 'arm' OR '$(WindowsAI-Platform)' == 'arm64'">
    <MLBinaries>$(WindowsAIBinary);$(OnnxRuntimeBinary)</MLBinaries>
  </PropertyGroup>
  <!-- Copy the selected native binaries into the consumer's output folder before Build.
       SkipUnchangedFiles keeps incremental builds fast. -->
  <Target Name="CopyMLBinaries" BeforeTargets="Build">
    <PropertyGroup>
      <WindowsAIBinplaceMessage>Binplacing WindowsAI binaries: {0} and {1}.</WindowsAIBinplaceMessage>
    </PropertyGroup>
    <Message Text="$([System.String]::Format('$(WindowsAIBinplaceMessage)', '$(WindowsAIBinary)', '$(OnnxRuntimeBinary)'))" />
    <Copy SkipUnchangedFiles="True" SourceFiles="$(MLBinaries)" DestinationFolder="$(OutDir)" />
  </Target>
</Project>

View file

@ -93,78 +93,6 @@
CopyToOutputDirectory="Always"
Visible="false"
/>
<None Include="$(NativeBuildOutputDir)\Microsoft.AI.MachineLearning.dll"
Condition="Exists('$(NativeBuildOutputDir)\Microsoft.AI.MachineLearning.dll')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\runtimes\win-$(TargetArchitecture)\native"
Pack="false"
CopyToOutputDirectory="Always"
Visible="false"
/>
<None Include="$(NativeBuildOutputDir)\Microsoft.AI.MachineLearning.lib"
Condition="Exists('$(NativeBuildOutputDir)\Microsoft.AI.MachineLearning.lib')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\runtimes\win-$(TargetArchitecture)\native"
Pack="false"
CopyToOutputDirectory="Never"
Visible="false"
/>
<None Include="$(NativeBuildOutputDir)\Microsoft.AI.MachineLearning.pdb"
Condition="Exists('$(NativeBuildOutputDir)\Microsoft.AI.MachineLearning.pdb')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\runtimes\win-$(TargetArchitecture)\native"
Pack="false"
CopyToOutputDirectory="Always"
Visible="false"
/>
<None Include="$(OnnxRuntimeBuildDirectory)\$(Configuration)\Microsoft.AI.MachineLearning.winmd"
Condition="Exists('$(OnnxRuntimeBuildDirectory)\$(Configuration)\Microsoft.AI.MachineLearning.winmd')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\build\native\metadata"
Pack="false"
CopyToOutputDirectory="Always"
Visible="false"
/>
<None Include="$(OnnxRuntimeBuildDirectory)\$(Configuration)\Microsoft.AI.MachineLearning.h"
Condition="Exists('$(OnnxRuntimeBuildDirectory)\$(Configuration)\Microsoft.AI.MachineLearning.h')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\build\native\include"
Pack="false"
CopyToOutputDirectory="Never"
Visible="false"
/>
<None Include="$(OnnxRuntimeBuildDirectory)\$(Configuration)\Microsoft.AI.MachineLearning.Native.h"
Condition="Exists('$(OnnxRuntimeBuildDirectory)\$(Configuration)\Microsoft.AI.MachineLearning.Native.h')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\build\native\include"
Pack="false"
CopyToOutputDirectory="Never"
Visible="false"
/>
<None Include="$(OnnxRuntimeRoot)\winml\api\dualapipartitionattribute.h"
Condition="Exists('$(OnnxRuntimeRoot)\winml\api\dualapipartitionattribute.h')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\build\native\include"
Pack="false"
CopyToOutputDirectory="Never"
Visible="false"
/>
<None Include="$(OnnxRuntimeRoot)\winml\api\dualapipartitionattribute.h"
Condition="Exists('$(OnnxRuntimeRoot)\winml\api\dualapipartitionattribute.h')
And ('$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'
Or '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime')"
PackagePath="\build\native\include"
Pack="false"
CopyToOutputDirectory="Never"
Visible="false"
/>
<None Include="$(NativeBuildOutputDir)\DirectML.dll"
Condition="Exists('$(NativeBuildOutputDir)\DirectML.dll')
And '$(OrtPackageId)' == 'Microsoft.ML.OnnxRuntime.DirectML'"

View file

@ -290,6 +290,9 @@ def parse_arguments():
parser.add_argument(
"--enable_wcos", action='store_true',
help="Build for Windows Core OS.")
parser.add_argument(
"--enable_windows_store", action='store_true',
help="Build for Windows Store")
parser.add_argument(
"--enable_lto", action='store_true',
help="Enable Link Time Optimization")
@ -554,6 +557,7 @@ def generate_build_tree(cmake_path, source_dir, build_dir, cuda_home,
bool(os.getenv('NIGHTLY_BUILD') == '1')) else "OFF"),
"-Donnxruntime_USE_DML=" + ("ON" if args.use_dml else "OFF"),
"-Donnxruntime_USE_WINML=" + ("ON" if args.use_winml else "OFF"),
"-Donnxruntime_BUILD_FOR_WINDOWS_STORE=" + ("ON" if args.enable_windows_store else "OFF"),
"-Donnxruntime_USE_TELEMETRY=" + (
"ON" if args.use_telemetry else "OFF"),
"-Donnxruntime_ENABLE_LTO=" + ("ON" if args.enable_lto else "OFF"),

View file

@ -22,9 +22,6 @@ steps:
move win-x86\runtimes\win-x86\native\onnxruntime.dll %%~ni\runtimes\win-x86\native\onnxruntime.dll
move win-x86\runtimes\win-x86\native\onnxruntime.lib %%~ni\runtimes\win-x86\native\onnxruntime.lib
move win-x86\runtimes\win-x86\native\onnxruntime.pdb %%~ni\runtimes\win-x86\native\onnxruntime.pdb
move win-x86\runtimes\win-x86\native\microsoft.ai.machinelearning.dll %%~ni\runtimes\win-x86\native\Microsoft.AI.MachineLearning.dll
move win-x86\runtimes\win-x86\native\microsoft.ai.machinelearning.lib %%~ni\runtimes\win-x86\native\Microsoft.AI.MachineLearning.lib
move win-x86\runtimes\win-x86\native\microsoft.ai.machinelearning.pdb %%~ni\runtimes\win-x86\native\Microsoft.AI.MachineLearning.pdb
move linux-x64\linux-x64\libonnxruntime.so %%~ni\runtimes\linux-x64\native\libonnxruntime.so
unzip osx-x64.zip -d osx-x64
dir osx-x64 /s

View file

@ -6,7 +6,7 @@ jobs:
parameters:
AgentPool : 'Win-CPU-2019'
JobName: 'Windows_CI_Dev'
BuildCommand: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --use_mklml --build_shared_lib --enable_onnx_tests --cmake_generator "Visual Studio 16 2019"'
BuildCommand: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --use_mklml --use_winml --enable_wcos --build_shared_lib --enable_onnx_tests --cmake_generator "Visual Studio 16 2019"'
BuildArch: 'x64'
EnvSetupScript: 'setup_env.bat'
sln_platform: 'x64'

View file

@ -8,7 +8,7 @@ jobs:
AgentPool : 'Win-GPU-2019'
ArtifactName: 'drop-nuget'
JobName: 'Windows_CI_GPU_CUDA_Dev'
BuildCommand: --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --use_telemetry --cmake_generator "Visual Studio 16 2019" --use_cuda --cuda_version=10.1 --cuda_home="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1" --cudnn_home="C:\local\cudnn-10.1-windows10-x64-v7.6.5.32\cuda"
BuildCommand: --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_winml --use_telemetry --cmake_generator "Visual Studio 16 2019" --use_cuda --cuda_version=10.1 --cuda_home="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1" --cudnn_home="C:\local\cudnn-10.1-windows10-x64-v7.6.5.32\cuda"
BuildArch: 'x64'
msbuildArchitecture: 'amd64'
EnvSetupScript: 'setup_env_cuda.bat'
@ -175,29 +175,28 @@ jobs:
powershell -Command "Invoke-WebRequest http://stahlworks.com/dev/unzip.exe -OutFile unzip.exe"
powershell -Command "Invoke-WebRequest http://stahlworks.com/dev/zip.exe -OutFile zip.exe"
set PATH=%CD%;%PATH%
SETLOCAL EnableDelayedExpansion
FOR /R %%i IN (*.nupkg) do (
rename %%~ni.nupkg %%~ni.zip
unzip %%~ni.zip -d %%~ni
del /Q %%~ni.zip
set filename=%%~ni
IF NOT "!filename:~25,7!"=="Managed" (
rename %%~ni.nupkg %%~ni.zip
unzip %%~ni.zip -d %%~ni
del /Q %%~ni.zip
unzip win-dml-x86.zip -d win-x86
mkdir %%~ni\runtimes\win-x86
mkdir %%~ni\runtimes\win-x86\native
unzip win-dml-x86.zip -d win-x86
mkdir %%~ni\runtimes\win-x86
mkdir %%~ni\runtimes\win-x86\native
move win-x86\runtimes\win-x86\native\onnxruntime.dll %%~ni\runtimes\win-x86\native\onnxruntime.dll
move win-x86\runtimes\win-x86\native\onnxruntime.lib %%~ni\runtimes\win-x86\native\onnxruntime.lib
move win-x86\runtimes\win-x86\native\onnxruntime.pdb %%~ni\runtimes\win-x86\native\onnxruntime.pdb
move win-x86\runtimes\win-x86\native\onnxruntime.dll %%~ni\runtimes\win-x86\native\onnxruntime.dll
move win-x86\runtimes\win-x86\native\onnxruntime.lib %%~ni\runtimes\win-x86\native\onnxruntime.lib
move win-x86\runtimes\win-x86\native\onnxruntime.pdb %%~ni\runtimes\win-x86\native\onnxruntime.pdb
move win-x86\runtimes\win-x86\native\directml.dll %%~ni\runtimes\win-x86\native\directml.dll
move win-x86\runtimes\win-x86\native\microsoft.ai.machinelearning.dll %%~ni\runtimes\win-x86\native\microsoft.ai.machinelearning.dll
move win-x86\runtimes\win-x86\native\microsoft.ai.machinelearning.lib %%~ni\runtimes\win-x86\native\microsoft.ai.machinelearning.lib
move win-x86\runtimes\win-x86\native\microsoft.ai.machinelearning.pdb %%~ni\runtimes\win-x86\native\microsoft.ai.machinelearning.pdb
move win-x86\runtimes\win-x86\native\directml.dll %%~ni\runtimes\win-x86\native\directml.dll
pushd %%~ni
zip -r ..\%%~ni.zip .
popd
move %%~ni.zip %%~ni.nupkg
pushd %%~ni
zip -r ..\%%~ni.zip .
popd
move %%~ni.zip %%~ni.nupkg
)
)
popd
copy $(Build.BinariesDirectory)\nuget-artifact-dml\Microsoft.ML.OnnxRuntime.DirectML*nupkg $(Build.ArtifactStagingDirectory)
@ -237,4 +236,4 @@ jobs:
- template: test_linux.yml
parameters:
AgentPool : 'Linux-GPU-CUDA10'
TestGPU : 'true'
TestGPU : 'true'

View file

@ -0,0 +1,236 @@
jobs:
- job: WindowsAI_DirectML_X64
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-GPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'x64'
- job: WindowsAI_DirectML_X86
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-GPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'x86'
- job: WindowsAI_CPU_ARM64
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-CPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'arm64'
- job: WindowsAI_CPU_ARM
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-CPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'arm'
- ${{ if eq('false', 'true') }}:
- job: WindowsAI_DirectML_X64_Store
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-GPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'x64'
BuildForStore: 'true'
- job: WindowsAI_DirectML_X86_Store
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-GPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'x86'
BuildForStore: 'true'
- job: WindowsAI_CPU_ARM64_Store
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-CPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'arm64'
BuildForStore: 'true'
- job: WindowsAI_CPU_ARM_Store
timeoutInMinutes: 120
workspace:
clean: all
pool:
name: 'Win-CPU-2019'
demands: []
steps:
- template: ../../templates/windowsai-nuget-build.yml
parameters:
BuildArch: 'arm'
BuildForStore: 'true'
- job: NuGet_Packaging
workspace:
clean: all
pool: 'Win-GPU-2019'
dependsOn:
- WindowsAI_DirectML_X64
- WindowsAI_DirectML_X86
- WindowsAI_CPU_ARM64
- WindowsAI_CPU_ARM
condition: succeeded()
steps:
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact - NuGet DirectML x64'
inputs:
artifactName: 'Microsoft.AI.MachineLearning.x64'
targetPath: '$(Build.BinariesDirectory)/nuget-artifact-x64'
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact - NuGet DirectML x86'
inputs:
artifactName: 'Microsoft.AI.MachineLearning.x86'
targetPath: '$(Build.BinariesDirectory)/nuget-artifact-x86'
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact - NuGet CPU ARM64'
inputs:
artifactName: 'Microsoft.AI.MachineLearning.arm64'
targetPath: '$(Build.BinariesDirectory)/nuget-artifact-arm64'
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact - NuGet CPU ARM'
inputs:
artifactName: 'Microsoft.AI.MachineLearning.arm'
targetPath: '$(Build.BinariesDirectory)/nuget-artifact-arm'
- task: PowerShell@2
displayName: 'Bundle NuGet and other binaries'
inputs:
targetType: 'inline'
script: |
Add-Type -AssemblyName "System.IO.Compression.FileSystem"
$packages = (Get-ChildItem -Filter Microsoft.AI.MachineLearning* -Recurse)
$x64_nuget_package_name = $packages[0].Name
$x64_nuget_package = $packages[0].FullName
$x64_unzipped_directory_root = $packages[0].Directory.FullName
$x64_unzipped_directory = [System.IO.Path]::Combine($x64_unzipped_directory_root, [System.IO.Path]::GetFileNameWithoutExtension($x64_nuget_package))
[System.IO.Compression.ZipFile]::ExtractToDirectory($x64_nuget_package, $x64_unzipped_directory)
$packages = (Get-ChildItem ..\nuget-artifact-x86 -Filter Microsoft.AI.MachineLearning* -Recurse)
$x86_nuget_package = $packages[0].FullName
$x86_unzipped_directory_root = $packages[0].Directory.FullName
$x86_unzipped_directory = [System.IO.Path]::Combine($x86_unzipped_directory_root, [System.IO.Path]::GetFileNameWithoutExtension($x86_nuget_package))
[System.IO.Compression.ZipFile]::ExtractToDirectory($x86_nuget_package, $x86_unzipped_directory)
$packages = (Get-ChildItem ..\nuget-artifact-arm64 -Filter Microsoft.AI.MachineLearning* -Recurse)
$arm64_nuget_package = $packages[0].FullName
$arm64_unzipped_directory_root = $packages[0].Directory.FullName
$arm64_unzipped_directory = [System.IO.Path]::Combine($arm64_unzipped_directory_root, [System.IO.Path]::GetFileNameWithoutExtension($arm64_nuget_package))
[System.IO.Compression.ZipFile]::ExtractToDirectory($arm64_nuget_package, $arm64_unzipped_directory)
$packages = (Get-ChildItem ..\nuget-artifact-arm -Filter Microsoft.AI.MachineLearning* -Recurse)
$arm_nuget_package = $packages[0].FullName
$arm_unzipped_directory_root = $packages[0].Directory.FullName
$arm_unzipped_directory = [System.IO.Path]::Combine($arm_unzipped_directory_root, [System.IO.Path]::GetFileNameWithoutExtension($arm_nuget_package))
[System.IO.Compression.ZipFile]::ExtractToDirectory($arm_nuget_package, $arm_unzipped_directory)
$x86_runtime_path_old = [System.IO.Path]::Combine($x86_unzipped_directory, 'runtimes', 'win-x86', 'native')
$x86_runtime_path_new = [System.IO.Path]::Combine($x64_unzipped_directory, 'runtimes', 'win-x86', 'native')
$arm64_runtime_path_old = [System.IO.Path]::Combine($arm64_unzipped_directory, 'runtimes', 'win-arm64', 'native')
$arm64_runtime_path_new = [System.IO.Path]::Combine($x64_unzipped_directory, 'runtimes', 'win-arm64', 'native')
$arm_runtime_path_old = [System.IO.Path]::Combine($arm_unzipped_directory, 'runtimes', 'win-arm', 'native')
$arm_runtime_path_new = [System.IO.Path]::Combine($x64_unzipped_directory, 'runtimes', 'win-arm', 'native')
New-Item -Path $x86_runtime_path_new -ItemType Directory
New-Item -Path $arm64_runtime_path_new -ItemType Directory
New-Item -Path $arm_runtime_path_new -ItemType Directory
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'onnxruntime.dll')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'onnxruntime.lib')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'onnxruntime.pdb')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'microsoft.ai.machinelearning.dll')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'microsoft.ai.machinelearning.lib')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'microsoft.ai.machinelearning.pdb')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($x86_runtime_path_old, 'directml.dll')) $x86_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm64_runtime_path_old, 'onnxruntime.dll')) $arm64_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm64_runtime_path_old, 'onnxruntime.lib')) $arm64_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm64_runtime_path_old, 'onnxruntime.pdb')) $arm64_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm64_runtime_path_old, 'microsoft.ai.machinelearning.dll')) $arm64_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm64_runtime_path_old, 'microsoft.ai.machinelearning.lib')) $arm64_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm64_runtime_path_old, 'microsoft.ai.machinelearning.pdb')) $arm64_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm_runtime_path_old, 'onnxruntime.dll')) $arm_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm_runtime_path_old, 'onnxruntime.lib')) $arm_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm_runtime_path_old, 'onnxruntime.pdb')) $arm_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm_runtime_path_old, 'microsoft.ai.machinelearning.dll')) $arm_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm_runtime_path_old, 'microsoft.ai.machinelearning.lib')) $arm_runtime_path_new
Copy-Item ([System.IO.Path]::Combine($arm_runtime_path_old, 'microsoft.ai.machinelearning.pdb')) $arm_runtime_path_new
$merged_nuget_path = [System.IO.Path]::Combine($Env:BUILD_ARTIFACTSTAGINGDIRECTORY, 'merged')
New-Item -Path $merged_nuget_path -ItemType Directory
$merged_nuget = [System.IO.Path]::Combine($merged_nuget_path, $x64_nuget_package_name)
$zip_tool_directory = [System.IO.Path]::Combine($x64_unzipped_directory_root, 'ziptool')
New-Item -Path $zip_tool_directory -ItemType Directory
$zip_tool = [System.IO.Path]::Combine($zip_tool_directory, 'zip.exe')
Invoke-WebRequest http://stahlworks.com/dev/zip.exe -OutFile $zip_tool
Start-Process -FilePath $zip_tool -ArgumentList "-r $merged_nuget ." -WorkingDirectory $x64_unzipped_directory -NoNewWindow -Wait
workingDirectory: $(Build.BinariesDirectory)\nuget-artifact-x64
- template: ../../templates/esrp_nuget.yml
parameters:
DisplayName: 'ESRP - sign NuGet package'
FolderPath: '$(Build.ArtifactStagingDirectory)/merged'
DoEsrp: 'true'
- template: ../../templates/validate-nuget.yml
parameters:
NugetPath: '$(Build.ArtifactStagingDirectory)\merged'
NugetPackage: 'Microsoft.AI.MachineLearning*nupkg'
PlatformsSupported: 'win-x64,win-x86,win-arm64,win-arm'
VerifyNugetSigning: 'true'
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline NuGet Artifact'
inputs:
artifactName: 'drop-signed-nuget'
targetPath: '$(Build.ArtifactStagingDirectory)/merged'

View file

@ -0,0 +1,13 @@
steps:
- powershell: |
if($env:TELEMETRYGUID)
{
$length = $env:TELEMETRYGUID.length
$fileContent = "#define ENABLE_TELEMETRY`n#define TraceLoggingOptionMicrosoftTelemetry() \
TraceLoggingOptionGroup("+$env:TELEMETRYGUID.substring(1, $length-2)+")"
New-Item -Path "$(Build.SourcesDirectory)\include\onnxruntime\core\platform\windows\TraceLoggingConfigPrivate.h" -ItemType "file" -Value "$fileContent" -Force
Write-Output "Enabling TELEMETRY"
}
displayName: 'Create TraceLoggingConfigPrivate.h For WinML Telemetry'
env:
TELEMETRYGUID: $(TELEMETRYGUID)

View file

@ -0,0 +1,185 @@
parameters:
BuildArch: 'x64'
RunTests : 'true'
BuildForStore: 'false'
steps:
- template: enable-telemetry.yml
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
addToPath: true
${{ if eq(parameters.BuildArch, 'x64') }}:
architecture: ${{ parameters.BuildArch }}
${{ if eq(parameters.BuildArch, 'x86') }}:
architecture: ${{ parameters.BuildArch }}
${{ if eq(parameters.BuildArch, 'arm64') }}:
architecture: 'x64'
${{ if eq(parameters.BuildArch, 'arm') }}:
architecture: 'x64'
- task: BatchScript@1
displayName: 'Setup Environment'
inputs:
${{ if eq(parameters.BuildArch, 'x64') }}:
filename: '$(Build.SourcesDirectory)\tools\ci_build\github\windows\setup_env.bat'
${{ if eq(parameters.BuildArch, 'x86') }}:
filename: '$(Build.SourcesDirectory)\tools\ci_build\github\windows\setup_env_x86.bat'
${{ if eq(parameters.BuildArch, 'arm64') }}:
filename: '$(Build.SourcesDirectory)\tools\ci_build\github\windows\setup_env.bat'
${{ if eq(parameters.BuildArch, 'arm') }}:
filename: '$(Build.SourcesDirectory)\tools\ci_build\github\windows\setup_env.bat'
modifyEnvironment: true
workingFolder: '$(Build.BinariesDirectory)'
- task: PythonScript@0
displayName: 'Generate CMake Configuration'
inputs:
scriptPath: '$(Build.SourcesDirectory)\tools\ci_build\build.py'
${{ if and(eq(parameters.BuildArch, 'x64'), eq(parameters.BuildForStore, 'false')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_dml --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto'
${{ if and(eq(parameters.BuildArch, 'x64'), eq(parameters.BuildForStore, 'true')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_dml --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --enable_windows_store'
${{ if and(eq(parameters.BuildArch, 'x86'), eq(parameters.BuildForStore, 'false')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_dml --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --x86'
${{ if and(eq(parameters.BuildArch, 'x86'), eq(parameters.BuildForStore, 'true')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_dml --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --x86 --enable_windows_store'
${{ if and(eq(parameters.BuildArch, 'arm64'), eq(parameters.BuildForStore, 'false')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --arm64'
${{ if and(eq(parameters.BuildArch, 'arm64'), eq(parameters.BuildForStore, 'true')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --arm64 --enable_windows_store'
${{ if and(eq(parameters.BuildArch, 'arm'), eq(parameters.BuildForStore, 'false')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --arm'
${{ if and(eq(parameters.BuildArch, 'arm'), eq(parameters.BuildForStore, 'true')) }}:
arguments: '--build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --enable_onnx_tests --enable_wcos --use_telemetry --use_winml --cmake_generator "Visual Studio 16 2019" --update --config RelWithDebInfo --enable_lto --arm --enable_windows_store'
workingDirectory: '$(Build.BinariesDirectory)'
# For cross-compiled targets (anything other than Win32/x64) the protoc built
# in the target build tree cannot run on the build agent, so later steps are
# pointed at the pre-built host protoc via the ProtocDirectory variable.
# NOTE(review): 'sln_platform' is not used by any other condition in this
# template (the rest key off 'BuildArch') -- confirm this parameter is declared.
- ${{ if notIn(parameters['sln_platform'], 'Win32', 'x64') }}:
# Use cross-compiled protoc
# The ##vso logging command in the script below sets the ProtocDirectory
# pipeline variable for all subsequent tasks in this job.
- script: |
@echo ##vso[task.setvariable variable=ProtocDirectory]$(Build.BinariesDirectory)\host_protoc\Release
# Build the CMake-generated onnxruntime.sln with MSBuild.
- task: VSBuild@1
displayName: 'Build'
inputs:
solution: '$(Build.BinariesDirectory)\RelWithDebInfo\onnxruntime.sln'
# MSBuild's platform name for 32-bit x86 is 'Win32'; every other
# architecture (x64/arm/arm64) maps to its own name directly.
${{ if ne(parameters.BuildArch, 'x86') }}:
platform: ${{ parameters.BuildArch }}
${{ if eq(parameters.BuildArch, 'x86') }}:
platform: 'Win32'
configuration: RelWithDebInfo
# Architecture of the MSBuild process itself, not of the build target.
# NOTE(review): this follows BuildArch even for arm/arm64 cross builds --
# confirm the agent can host an MSBuild of that architecture.
msbuildArchitecture: ${{ parameters.BuildArch }}
maximumCpuCount: true
logProjectEvents: true
workingFolder: '$(Build.BinariesDirectory)\RelWithDebInfo'
createLogFile: true
# Unit tests run only for x64: arm/arm64 binaries cannot execute on the build
# agent, and x86 tests are skipped here as well (see "disable arm tests" in
# the change history).
- ${{ if eq(parameters.BuildArch, 'x64') }}:
# Link the test-model directory into the build output, install the freshly
# built Python wheel, then re-invoke build.py in --test mode.
- script: |
mklink /D /J $(Build.BinariesDirectory)\RelWithDebInfo\models $(Build.BinariesDirectory)\models
DIR dist\ /S /B > wheel_filename_file
set /p WHEEL_FILENAME=<wheel_filename_file
del wheel_filename_file
python.exe -m pip install -q --upgrade %WHEEL_FILENAME%
set PATH=%PATH%;$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo
@echo %PATH%
python $(Build.SourcesDirectory)\tools\ci_build\build.py --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --test --config RelWithDebInfo --enable_lto %BUILD_ARCH_SWITCH%
workingDirectory: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo'
displayName: 'Run Unit Tests'
# NOTE(review): the enclosing step is already gated to x64, so the ne('x64')
# branch below is dead and BUILD_ARCH_SWITCH is always empty; the whole env
# block could be reduced to a single BUILD_ARCH_SWITCH: ''.
env:
${{ if eq(parameters.BuildArch, 'x64') }}:
BUILD_ARCH_SWITCH: ''
${{ if ne(parameters.BuildArch, 'x64') }}:
BUILD_ARCH_SWITCH: '--${{ parameters.BuildArch }}'
# Publish unit test result XML files to the pipeline's Tests tab.
# succeededOrFailed() ensures results are published even when the test
# step above failed, so failures remain visible.
- task: PublishTestResults@2
displayName: 'Publish unit test results'
inputs:
testResultsFiles: '**\*.results.xml'
searchFolder: '$(Build.BinariesDirectory)'
testRunTitle: 'Unit Test Run'
condition: succeededOrFailed()
# Pin the NuGet client version used by the packaging steps below.
# NOTE(review): displayName says "4.9" while versionSpec pins 4.9.4.
- task: NuGetToolInstaller@0
displayName: Use Nuget 4.9
inputs:
versionSpec: 4.9.4
# Import the VS2019 developer environment. arm and arm64 deliberately load the
# x64 vars: those targets are cross-compiled with the x64 host toolchain, so
# only x86 gets a different argument.
- task: BatchScript@1
displayName: 'Setup VS2019 env vars'
inputs:
filename: 'C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat'
${{ if eq(parameters.BuildArch, 'x64') }}:
arguments: 'x64'
${{ if eq(parameters.BuildArch, 'x86') }}:
arguments: 'x86'
${{ if eq(parameters.BuildArch, 'arm64') }}:
arguments: 'x64'
${{ if eq(parameters.BuildArch, 'arm') }}:
arguments: 'x64'
# Propagate the variables set by vcvarsall.bat to subsequent pipeline steps.
modifyEnvironment: true
# Esrp signing
# Sign every native dll in the build output before it is packed into the
# NuGet package (packaging steps below consume these signed binaries).
- template: esrp_dll.yml
parameters:
FolderPath: '$(Build.BinariesDirectory)\RelWithDebInfo'
DisplayName: 'ESRP - Sign Native dlls'
DoEsrp: 'true'
# Create the Microsoft.AI.MachineLearning NuGet package via the
# CreateWindowsAIPackage msbuild target, then stage the resulting .nupkg
# files for publishing. One branch per architecture:
#   - x64 passes no TargetArchitecture (the project's default);
#   - x86 adds /p:TargetArchitecture=x86;
#   - arm/arm64 also pass /p:ProtocDirectory so packaging can invoke the
#     host-built protoc (the target-arch one cannot run on the agent).
# NOTE(review): the four branches differ only in the msbuild properties --
# candidates for consolidation into a single parameterized step.
- ${{ if eq(parameters.BuildArch, 'x64') }}:
- script: |
msbuild $(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.proj /p:Configuration=RelWithDebInfo /t:CreateWindowsAIPackage /p:OnnxRuntimeBuildDirectory=$(Build.BinariesDirectory) /p:OnnxRuntimeSourceDirectory=$(Build.SourcesDirectory)
copy $(Build.SourcesDirectory)\csharp\src\Microsoft.AI.MachineLearning\bin\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
copy $(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
workingDirectory: '$(Build.SourcesDirectory)\csharp'
displayName: 'Create NuGet Package'
# x86: same as x64 plus an explicit TargetArchitecture.
- ${{ if eq(parameters.BuildArch, 'x86') }}:
- script: |
msbuild $(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.proj /p:Configuration=RelWithDebInfo /t:CreateWindowsAIPackage /p:OnnxRuntimeBuildDirectory=$(Build.BinariesDirectory) /p:OnnxRuntimeSourceDirectory=$(Build.SourcesDirectory) /p:TargetArchitecture=x86
copy $(Build.SourcesDirectory)\csharp\src\Microsoft.AI.MachineLearning\bin\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
copy $(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
workingDirectory: '$(Build.SourcesDirectory)\csharp'
displayName: 'Create NuGet Package'
# arm64: cross build, so also point at the host protoc.
- ${{ if eq(parameters.BuildArch, 'arm64') }}:
- script: |
msbuild $(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.proj /p:Configuration=RelWithDebInfo /t:CreateWindowsAIPackage /p:OnnxRuntimeBuildDirectory=$(Build.BinariesDirectory) /p:OnnxRuntimeSourceDirectory=$(Build.SourcesDirectory) /p:TargetArchitecture=arm64 /p:ProtocDirectory=$(Build.BinariesDirectory)\host_protoc\Release
copy $(Build.SourcesDirectory)\csharp\src\Microsoft.AI.MachineLearning\bin\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
copy $(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
workingDirectory: '$(Build.SourcesDirectory)\csharp'
displayName: 'Create NuGet Package'
# arm: cross build, so also point at the host protoc.
- ${{ if eq(parameters.BuildArch, 'arm') }}:
- script: |
msbuild $(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.proj /p:Configuration=RelWithDebInfo /t:CreateWindowsAIPackage /p:OnnxRuntimeBuildDirectory=$(Build.BinariesDirectory) /p:OnnxRuntimeSourceDirectory=$(Build.SourcesDirectory) /p:TargetArchitecture=arm /p:ProtocDirectory=$(Build.BinariesDirectory)\host_protoc\Release
copy $(Build.SourcesDirectory)\csharp\src\Microsoft.AI.MachineLearning\bin\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
copy $(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo\*.nupkg $(Build.ArtifactStagingDirectory)
workingDirectory: '$(Build.SourcesDirectory)\csharp'
displayName: 'Create NuGet Package'
# Publish the staged .nupkg files as a per-architecture pipeline artifact;
# a later job merges the per-arch packages into one NuGet package.
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact: Microsoft.AI.MachineLearning ${{ parameters.BuildArch }}'
inputs:
artifactName: 'Microsoft.AI.MachineLearning.${{ parameters.BuildArch }}'
targetPath: '$(Build.ArtifactStagingDirectory)'
# Push PDBs to the Azure DevOps symbol server for release builds only.
# This is a runtime condition (variables[...]), evaluated per run, unlike
# the ${{ }} template conditions above which resolve at expansion time.
- task: PublishSymbols@2
displayName: 'Publish Build Symbols'
condition: eq(variables['IsReleaseBuild'], 'true')
inputs:
symbolsFolder: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo'
searchPattern: '**/*.pdb'
symbolServerType: teamServices
# Compliance tasks require logs from Debug Build
- ${{ if eq(parameters['DoCompliance'], 'true') }}:
- template: compliance.yml
# Component-governance scan of build dependencies; gated on prior success.
- template: component-governance-component-detection-steps.yml
parameters :
condition : 'succeeded'
# Always leave the agent's build directory clean at the end of the job.
- template: clean-agent-build-directory-step.yml

View file

@ -55,25 +55,23 @@ def generate_project_url(list, project_url):
def generate_repo_url(list, repo_url, commit_id):
list.append('<repository type="git" url="' + repo_url + '"' + ' commit="' + commit_id + '" />')
def generate_dependencies(list, version):
list.append('<dependencies>')
def generate_dependencies(list, package_name, version):
if (package_name != 'Microsoft.AI.MachineLearning'):
list.append('<dependencies>')
# Support .Net Core
list.append('<group targetFramework="NETCOREAPP">')
list.append('<dependency id="Microsoft.ML.OnnxRuntime.Managed"' + ' version="' + version + '"/>')
list.append('</group>')
# Support .Net Standard
list.append('<group targetFramework="NETSTANDARD">')
list.append('<dependency id="Microsoft.ML.OnnxRuntime.Managed"' + ' version="' + version + '"/>')
list.append('</group>')
# Support .Net Framework
list.append('<group targetFramework="NETFRAMEWORK">')
list.append('<dependency id="Microsoft.ML.OnnxRuntime.Managed"' + ' version="' + version + '"/>')
list.append('</group>')
# Support .Net Core
list.append('<group targetFramework="NETCOREAPP">')
list.append('<dependency id="Microsoft.ML.OnnxRuntime.Managed"' + ' version="' + version + '"/>')
list.append('</group>')
# Support .Net Standard
list.append('<group targetFramework="NETSTANDARD">')
list.append('<dependency id="Microsoft.ML.OnnxRuntime.Managed"' + ' version="' + version + '"/>')
list.append('</group>')
# Support .Net Framework
list.append('<group targetFramework="NETFRAMEWORK">')
list.append('<dependency id="Microsoft.ML.OnnxRuntime.Managed"' + ' version="' + version + '"/>')
list.append('</group>')
list.append('</dependencies>')
list.append('</dependencies>')
def get_env_var(key):
return os.environ.get(key)
@ -106,7 +104,7 @@ def generate_metadata(list, args):
generate_license(metadata_list)
generate_project_url(metadata_list, 'https://github.com/Microsoft/onnxruntime')
generate_repo_url(metadata_list, 'https://github.com/Microsoft/onnxruntime.git', args.commit_id)
generate_dependencies(metadata_list, args.package_version)
generate_dependencies(metadata_list, args.package_name, args.package_version)
generate_release_notes(metadata_list)
metadata_list.append('</metadata>')
@ -115,90 +113,106 @@ def generate_metadata(list, args):
def generate_files(list, args):
files_list = ['<files>']
is_cpu_package = args.package_name == 'Microsoft.ML.OnnxRuntime'
is_mklml_package = args.package_name == 'Microsoft.ML.OnnxRuntime.MKLML'
is_cuda_gpu_package = args.package_name == 'Microsoft.ML.OnnxRuntime.Gpu'
is_dml_package = args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML'
is_windowsai_package = args.package_name == 'Microsoft.AI.MachineLearning'
includes_cuda = is_cuda_gpu_package or is_cpu_package # Why does the CPU package ship the cuda provider headers?
includes_winml = is_windowsai_package
includes_directml = (is_dml_package or is_windowsai_package) and (args.target_architecture == 'x64' or args.target_architecture == 'x86')
# Process headers
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'include\\onnxruntime\\core\\session\\onnxruntime_*.h') + '" target="build\\native\\include" />')
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'include\\onnxruntime\\core\\providers\\cpu\\cpu_provider_factory.h') + '" target="build\\native\\include" />')
if (args.package_name != 'Microsoft.ML.OnnxRuntime.DirectML'):
if includes_cuda:
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'include\\onnxruntime\\core\\providers\\cuda\\cuda_provider_factory.h') + '" target="build\\native\\include" />')
else: # it is a DirectML package
if includes_directml:
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'include\\onnxruntime\\core\\providers\\dml\\dml_provider_factory.h') + '" target="build\\native\\include" />')
# Process DirectML dll
if os.path.exists(os.path.join(args.native_build_path, 'DirectML.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'DirectML.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'DirectML.pdb') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('<file src=' + '"' + os.path.join(args.packages_path, 'DirectML.0.0.1\\LICENSE.txt') + '" target="DirectML_LICENSE.txt" />')
if includes_winml:
# Add microsoft.ai.machinelearning headers
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'microsoft.ai.machinelearning.h') + '" target="build\\native\\include\\abi\\Microsoft.AI.MachineLearning.h" />')
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'winml\\api\\dualapipartitionattribute.h') + '" target="build\\native\\include\\abi\\dualapipartitionattribute.h" />')
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'microsoft.ai.machinelearning.native.h') + '" target="build\\native\\include\\Microsoft.AI.MachineLearning.Native.h" />')
# Process microsoft.ai.machinelearning.winmd
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'microsoft.ai.machinelearning.winmd') + '" target="lib\\uap10.0\\Microsoft.AI.MachineLearning.winmd" />')
# Process runtimes
# Process onnxruntime import lib, dll, and pdb
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnxruntime.lib') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnxruntime.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnxruntime.pdb') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
if includes_directml:
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'DirectML.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'DirectML.pdb') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('<file src=' + '"' + os.path.join(args.packages_path, 'DirectML.0.0.1\\LICENSE.txt') + '" target="DirectML_LICENSE.txt" />')
if includes_winml:
# Process microsoft.ai.machinelearning import lib, dll, and pdb
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'microsoft.ai.machinelearning.lib') + '" target="runtimes\\win-' + args.target_architecture + '\\native\\Microsoft.AI.MachineLearning.lib" />')
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'microsoft.ai.machinelearning.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native\\Microsoft.AI.MachineLearning.dll" />')
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'microsoft.ai.machinelearning.pdb') + '" target="runtimes\\win-' + args.target_architecture + '\\native\\Microsoft.AI.MachineLearning.pdb" />')
if is_cpu_package or is_cuda_gpu_package or is_dml_package or is_mklml_package:
# Process dnll.dll
if os.path.exists(os.path.join(args.native_build_path, 'dnnl.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'dnnl.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process mklml.dll
if os.path.exists(os.path.join(args.native_build_path, 'mklml.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'mklml.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
if (is_windows()):
# Process Microsoft.AI.MachineLearning lib, dll, and pdb
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'microsoft.ai.machinelearning.lib') + '" target="runtimes\\win-' + args.target_architecture + '\\native\\Microsoft.AI.MachineLearning.lib" />')
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'microsoft.ai.machinelearning.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native\\Microsoft.AI.MachineLearning.dll" />')
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'microsoft.ai.machinelearning.pdb') + '" target="runtimes\\win-' + args.target_architecture + '\\native\\Microsoft.AI.MachineLearning.pdb" />')
# Process microsoft.ai.machinelearning.winmd
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'microsoft.ai.machinelearning.winmd') + '" target="build\\native\\metadata\\Microsoft.AI.MachineLearning.winmd" />')
# Process microsoft.ai.machinelearning headers
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'microsoft.ai.machinelearning.h') + '" target="build\\native\\include\\Microsoft.AI.MachineLearning.h" />')
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'microsoft.ai.machinelearning.native.h') + '" target="build\\native\\include\\Microsoft.AI.MachineLearning.Native.h" />')
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime'):
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'winml\\api\\dualapipartitionattribute.h') + '" target="build\\native\\include\\dualapipartitionattribute.h" />')
if (args.package_name == 'Microsoft.ML.OnnxRuntime.DirectML' or args.package_name == 'Microsoft.ML.OnnxRuntime') and os.path.exists(os.path.join(args.ort_build_path, args.build_config, 'dualapipartitionattribute.h')):
files_list.append('<file src=' + '"' + os.path.join(args.ort_build_path, args.build_config, 'dualapipartitionattribute.h') + '" target="build\\native\\include\\dualapipartitionattribute.h" />')
# Process dnll.dll
if os.path.exists(os.path.join(args.native_build_path, 'dnnl.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'dnnl.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process libiomp5md.dll
if os.path.exists(os.path.join(args.native_build_path, 'libiomp5md.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'libiomp5md.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process mklml.dll
if os.path.exists(os.path.join(args.native_build_path, 'mklml.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'mklml.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process tvm.dll
if os.path.exists(os.path.join(args.native_build_path, 'tvm.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'tvm.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process libiomp5md.dll
if os.path.exists(os.path.join(args.native_build_path, 'libiomp5md.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'libiomp5md.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process tvm.dll
if os.path.exists(os.path.join(args.native_build_path, 'tvm.dll')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'tvm.dll') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Some tools to be packaged in nightly build only, should not be released
# These are copied to the runtimes folder for convenience of loading with the dlls
if args.is_release_build.lower() != 'true' and args.target_architecture == 'x64' and os.path.exists(os.path.join(args.native_build_path, 'onnxruntime_perf_test.exe')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnxruntime_perf_test.exe') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
if args.is_release_build.lower() != 'true' and args.target_architecture == 'x64' and os.path.exists(os.path.join(args.native_build_path, 'onnx_test_runner.exe')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnx_test_runner.exe') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
# Process props and targets files
if is_windowsai_package:
# Process props file
windowsai_props = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', 'Microsoft.AI.MachineLearning.props')
files_list.append('<file src=' + '"' + windowsai_props + '" target="build\\native" />')
# Process targets files
windowsai_targets = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', 'Microsoft.AI.MachineLearning.targets')
files_list.append('<file src=' + '"' + windowsai_targets + '" target="build\\native" />')
if is_cpu_package or is_cuda_gpu_package or is_dml_package or is_mklml_package:
# Process props file
source_props = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', 'props.xml')
target_props = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', args.package_name + '.props')
os.system('copy ' + source_props + ' ' + target_props)
files_list.append('<file src=' + '"' + target_props + '" target="build\\native" />')
files_list.append('<file src=' + '"' + target_props + '" target="build\\netstandard1.1" />')
# Process targets file
source_targets = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', 'targets.xml')
target_targets = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', args.package_name + '.targets')
os.system('copy ' + source_targets + ' ' + target_targets)
files_list.append('<file src=' + '"' + target_targets + '" target="build\\native" />')
files_list.append('<file src=' + '"' + target_targets + '" target="build\\netstandard1.1" />')
# Process License, ThirdPartyNotices, Privacy, README
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'LICENSE.txt') + '" target="LICENSE.txt" />')
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'ThirdPartyNotices.txt') + '" target="ThirdPartyNotices.txt" />')
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'docs', 'Privacy.md') + '" target="Privacy.md" />')
files_list.append('<file src=' + '"' + os.path.join(args.sources_path, 'docs', 'C_API.md') + '" target="README.md" />')
# Process props file
source_props = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', 'props.xml')
target_props = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', args.package_name + '.props')
os.system('copy ' + source_props + ' ' + target_props)
files_list.append('<file src=' + '"' + target_props + '" target="build\\native" />')
files_list.append('<file src=' + '"' + target_props + '" target="build\\netstandard1.1" />')
# Process targets file
source_targets = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', 'targets.xml')
target_targets = os.path.join(args.sources_path, 'csharp', 'src', 'Microsoft.ML.OnnxRuntime', args.package_name + '.targets')
os.system('copy ' + source_targets + ' ' + target_targets)
files_list.append('<file src=' + '"' + target_targets + '" target="build\\native" />')
files_list.append('<file src=' + '"' + target_targets + '" target="build\\netstandard1.1" />')
# Some tools to be packaged in nightly build only, should not be released
# These are copied to the runtimes folder for convenience of loading with the dlls
if args.is_release_build.lower() != 'true' and args.target_architecture == 'x64' and os.path.exists(os.path.join(args.native_build_path, 'onnxruntime_perf_test.exe')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnxruntime_perf_test.exe') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
if args.is_release_build.lower() != 'true' and args.target_architecture == 'x64' and os.path.exists(os.path.join(args.native_build_path, 'onnx_test_runner.exe')):
files_list.append('<file src=' + '"' + os.path.join(args.native_build_path, 'onnx_test_runner.exe') + '" target="runtimes\\win-' + args.target_architecture + '\\native" />')
files_list.append('</files>')