Update Version and cherry-pick PRs (#1088)
Update Version and cherry-pick PRs #1050 and #1066

---------

Co-authored-by: Chester Liu <[email protected]>
Co-authored-by: Baiju Meswani <[email protected]>
3 people authored Nov 22, 2024
1 parent 0ae77b4 commit 83896a1
Showing 18 changed files with 555 additions and 303 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/linux-cpu-x64-build.yml
@@ -94,7 +94,7 @@ jobs:
run: |
export ORTGENAI_LOG_ORT_LIB=1
cd test/csharp
- dotnet test /p:Configuration=Release /p:NativeBuildOutputDir="../../build/cpu/" /p:OrtLibDir="../../ort/lib/"
+ dotnet test /p:Configuration=Release /p:NativeBuildOutputDir="../../build/cpu/" /p:OrtLibDir="../../ort/lib/" --verbosity normal
- name: Run tests
run: |
2 changes: 1 addition & 1 deletion .github/workflows/mac-cpu-arm64-build.yml
@@ -82,7 +82,7 @@ jobs:
run: |
export ORTGENAI_LOG_ORT_LIB=1
cd test/csharp
- dotnet test /p:Configuration=Release /p:NativeBuildOutputDir="../../build/cpu/osx-arm64"
+ dotnet test /p:Configuration=Release /p:NativeBuildOutputDir="../../build/cpu/osx-arm64" --verbosity normal
- name: Run tests
run: |
2 changes: 1 addition & 1 deletion .github/workflows/win-cpu-x64-build.yml
@@ -90,7 +90,7 @@ jobs:
- name: Build the C# API and Run the C# Tests
run: |
cd test\csharp
- dotnet test /p:NativeBuildOutputDir="$env:GITHUB_WORKSPACE\$env:binaryDir\Release" /p:OrtLibDir="$env:GITHUB_WORKSPACE\ort\lib"
+ dotnet test /p:NativeBuildOutputDir="$env:GITHUB_WORKSPACE\$env:binaryDir\Release" /p:OrtLibDir="$env:GITHUB_WORKSPACE\ort\lib" --verbosity normal
- name: Verify Build Artifacts
if: always()
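All three workflow edits make the same change: the dotnet test invocation in the C# test step gains a trailing --verbosity normal flag, which raises the MSBuild/test logger output above the CLI default so that per-test and build messages appear in the CI logs. A minimal sketch of an equivalent standalone step is shown below; the configuration and paths are illustrative placeholders rather than part of this commit.

# Illustrative GitHub Actions step (paths and MSBuild properties are placeholders).
# --verbosity accepts quiet|minimal|normal|detailed|diagnostic.
- name: Build the C# API and Run the C# Tests
  run: |
    cd test/csharp
    dotnet test /p:Configuration=Release --verbosity normal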
124 changes: 41 additions & 83 deletions .pipelines/stages/jobs/nuget-validation-job.yml
@@ -88,6 +88,16 @@ jobs:
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: prebuild_phi3_5_vision_model_folder
${{ if eq(parameters.ep, 'cpu') }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'
${{ elseif eq(parameters.ep, 'cuda') }}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ elseif eq(parameters.ep, 'directml')}}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: cuda_docker_image
${{ if eq(parameters.cuda_version, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1
@@ -120,98 +130,46 @@ jobs:
inputs:
version: '8.x'

- template: steps/utils/flex-download-pipeline-artifact.yml
parameters:
StepName: 'Download NuGet Artifacts'
ArtifactName: $(artifactName)-nuget
TargetPath: '$(Build.BinariesDirectory)/nuget'
SpecificArtifact: ${{ parameters.specificArtifact }}
BuildId: ${{ parameters.BuildId }}

- template: steps/utils/download-huggingface-model.yml
parameters:
StepName: 'Download Model from HuggingFace'
HuggingFaceRepo: 'microsoft/Phi-3-mini-4k-instruct-onnx'
LocalFolder: 'phi3-mini'
RepoFolder: $(prebuild_phi3_mini_model_folder)
LocalFolder: 'models'
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/csharp/HelloPhi'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- template: steps/utils//flex-download-pipeline-artifact.yml
- template: steps/nuget-validation-step.yml
parameters:
StepName: 'Download NuGet Artifacts'
ArtifactName: $(artifactName)-nuget
TargetPath: '$(Build.BinariesDirectory)/nuget'
SpecificArtifact: ${{ parameters.specificArtifact }}
BuildId: ${{ parameters.BuildId }}
CsprojFolder: "examples/csharp/HelloPhi"
CsprojName: "HelloPhi"
CsprojConfiguration: $(csproj_configuration)
LocalFolder: 'phi3-mini'
ModelFolder: $(prebuild_phi3_mini_model_folder)

- task: Docker@2
inputs:
containerRegistry: onnxruntimebuildcache
command: "login"
addPipelineData: false
displayName: "Log in to container registry"

- ${{ if eq(parameters.os, 'win') }}:
- ${{ if eq(parameters.ep, 'cuda') }}:
- powershell: |
$env:AZCOPY_MSI_CLIENT_ID = "63b63039-6328-442f-954b-5a64d124e5b4";
azcopy.exe cp --recursive "https://lotusscus.blob.core.windows.net/models/cuda_sdk/v$(cuda_version)" 'cuda_sdk'
displayName: 'Download CUDA $(cuda_version)'
workingDirectory: '$(Build.Repository.LocalPath)'
- powershell: |
if ("$(ep)" -eq "cuda") {
$env:CUDA_PATH = '$(Build.Repository.LocalPath)\cuda_sdk\v$(cuda_version)'
$env:PATH = "$env:CUDA_PATH\bin;$env:CUDA_PATH\extras\CUPTI\lib64;$env:PATH"
Write-Host $env:PATH
}
dotnet --info
Copy-Item -Force -Recurse -Verbose $(Build.BinariesDirectory)/nuget/* -Destination examples/csharp/HelloPhi/
cd examples/csharp/HelloPhi
Move-Item models\$(prebuild_phi3_mini_model_folder) models\phi-3
dotnet restore -r $(os)-$(arch) /property:Configuration=$(csproj_configuration) --source https://api.nuget.org/v3/index.json --source https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json --source $PWD --disable-parallel --verbosity detailed
dotnet run -r $(os)-$(arch) --configuration $(csproj_configuration) --no-restore --verbosity normal -- -m ./models/phi-3
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
env:
NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS: 180
NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS: 180
- ${{ elseif or(eq(parameters.os, 'linux'), eq(parameters.os, 'osx')) }}:
- bash: |
dotnet --info
cp $(Build.BinariesDirectory)/nuget/* examples/csharp/HelloPhi/
cd examples/csharp/HelloPhi
mv models/$(prebuild_phi3_mini_model_folder) models/phi-3
dotnet restore -r $(os)-$(arch) /property:Configuration=$(csproj_configuration) --source https://api.nuget.org/v3/index.json --source https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json --source $PWD --disable-parallel --verbosity detailed
dotnet build ./HelloPhi.csproj -r $(os)-$(arch) /property:Configuration=$(csproj_configuration) --no-restore --self-contained
ls -l ./bin/$(csproj_configuration)/net6.0/$(os)-$(arch)/
displayName: 'Perform dotnet restore & build'
workingDirectory: '$(Build.Repository.LocalPath)'
env:
NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS: 180
NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS: 180
- ${{ if eq(parameters.ep, 'cuda') }}:
- bash: |
set -e -x
docker pull $(cuda_docker_image)
docker run \
--gpus all \
--rm \
--volume $(Build.Repository.LocalPath):/ort_genai_src \
--volume $(Build.BinariesDirectory):/ort_genai_binary \
-e HF_TOKEN=$HF_TOKEN \
-w /ort_genai_src/ $(cuda_docker_image) \
bash -c " \
export ORTGENAI_LOG_ORT_LIB=1 && \
cd /ort_genai_src/examples/csharp/HelloPhi && \
chmod +x ./bin/Release_Cuda/net6.0/linux-x64/HelloPhi && \
./bin/Release_Cuda/net6.0/linux-x64/HelloPhi -m ./models/phi-3"
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- ${{ elseif eq(parameters.ep, 'cpu') }}:
- bash: |
export ORTGENAI_LOG_ORT_LIB=1
cd examples/csharp/HelloPhi
dotnet run -r $(os)-$(arch) --configuration $(csproj_configuration) --no-build --verbosity normal -- -m ./models/phi-3
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- template: steps/utils/download-huggingface-model.yml
parameters:
HuggingFaceRepo: 'microsoft/Phi-3.5-vision-instruct-onnx'
LocalFolder: 'phi3.5-vision'
RepoFolder: $(prebuild_phi3_5_vision_model_folder)
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/csharp/HelloPhi3V'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- template: steps/compliant-and-cleanup-step.yml
- template: steps/nuget-validation-step.yml
parameters:
CsprojFolder: "examples/csharp/HelloPhi3V"
CsprojName: "HelloPhi3V"
CsprojConfiguration: $(csproj_configuration)
LocalFolder: 'phi3.5-vision'
ModelFolder: $(prebuild_phi3_5_vision_model_folder)

- template: steps/compliant-and-cleanup-step.yml
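Most of this file's change replaces the per-OS, per-EP inline blocks (the Windows PowerShell run, the Linux/macOS restore-and-build script, and the Docker-based CUDA run) with a reusable steps/nuget-validation-step.yml template, invoked once for the Phi-3 mini HelloPhi example and once for the new Phi-3.5 vision HelloPhi3V example. Only the call sites appear in this diff; the template body lives elsewhere in the repository. A rough, hypothetical outline of its interface, inferred solely from the parameters passed above (types, comments, and the step body are assumptions):

# Hypothetical sketch of steps/nuget-validation-step.yml; the real file is not shown in this diff.
# Parameter names come from the call sites above; everything else is assumed.
parameters:
- name: CsprojFolder        # e.g. examples/csharp/HelloPhi
  type: string
- name: CsprojName          # e.g. HelloPhi
  type: string
- name: CsprojConfiguration # e.g. $(csproj_configuration)
  type: string
- name: LocalFolder         # e.g. phi3-mini (folder the model was downloaded into)
  type: string
- name: ModelFolder         # e.g. $(prebuild_phi3_mini_model_folder)
  type: string
# (steps that restore, build, and run the example against LocalFolder/ModelFolder would follow)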
121 changes: 33 additions & 88 deletions .pipelines/stages/jobs/py-validation-job.yml
@@ -97,6 +97,16 @@ jobs:
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: prebuild_phi3_5_vision_model_folder
${{ if eq(parameters.ep, 'cpu') }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'
${{ elseif eq(parameters.ep, 'cuda') }}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ elseif eq(parameters.ep, 'directml')}}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: cuda_docker_image
${{ if eq(parameters.cuda_version, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1
@@ -147,99 +157,34 @@ jobs:

- template: steps/utils/download-huggingface-model.yml
parameters:
StepName: 'Download Model from HuggingFace'
HuggingFaceRepo: 'microsoft/Phi-3-mini-4k-instruct-onnx'
LocalFolder: 'phi3-mini'
RepoFolder: $(prebuild_phi3_mini_model_folder)
LocalFolder: 'models'
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/python'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- task: Docker@2
inputs:
containerRegistry: onnxruntimebuildcache
command: "login"
addPipelineData: false
displayName: "Log in to container registry"

- ${{ if or(eq(parameters.os, 'linux'), eq(parameters.os, 'osx')) }}:
- ${{ if eq(parameters.ep, 'cuda') }}:
- bash: |
set -e -x
docker pull $(cuda_docker_image)
python_exe=/opt/python/cp310-cp310/bin/python3.10
docker run \
--gpus all \
--rm \
--volume $(Build.Repository.LocalPath):/ort_genai_src \
--volume $(Build.BinariesDirectory):/ort_genai_binary \
-e HF_TOKEN=$HF_TOKEN \
-w /ort_genai_src/ $(cuda_docker_image) \
bash -c " \
export ORTGENAI_LOG_ORT_LIB=1 && \
$python_exe -m pip install -r /ort_genai_src/test/python/requirements.txt && \
$python_exe -m pip install -r /ort_genai_src/test/python/cuda/torch/requirements.txt && \
$python_exe -m pip install -r /ort_genai_src/test/python/cuda/ort/requirements.txt && \
cd /ort_genai_src/examples/python && \
$python_exe -m pip install --no-index --find-links=/ort_genai_binary/wheel $(pip_package_name) && \
$python_exe model-generate.py -m ./models/$(prebuild_phi3_mini_model_folder) --min_length 25 --max_length 50 --verbose"
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- ${{ elseif eq(parameters.ep, 'cpu') }}:
- bash: |
export ORTGENAI_LOG_ORT_LIB=1
python -m pip install -r test/python/requirements.txt
if [[ "$(os)" == "linux" ]]; then
python -m pip install -r test/python/cpu/torch/requirements.txt
python -m pip install -r test/python/cpu/ort/requirements.txt
fi
if [[ "$(os)" == "osx" ]]; then
python -m pip install -r test/python/macos/torch/requirements.txt
python -m pip install -r test/python/macos/ort/requirements.txt
fi
cd examples/python
python -m pip install --no-index --find-links=$(Build.BinariesDirectory)/wheel $(pip_package_name)
python model-generate.py -m ./models/$(prebuild_phi3_mini_model_folder) --min_length 25 --max_length 50 --verbose
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- ${{ if eq(parameters.os, 'win') }}:
- ${{ if eq(parameters.ep, 'cuda') }}:
- powershell: |
$env:AZCOPY_MSI_CLIENT_ID = "63b63039-6328-442f-954b-5a64d124e5b4";
azcopy.exe cp --recursive "https://lotusscus.blob.core.windows.net/models/cuda_sdk/v$(cuda_version)" 'cuda_sdk'
displayName: 'Download CUDA $(cuda_version)'
workingDirectory: '$(Build.Repository.LocalPath)'
- powershell: |
if ("$(arch)" -ne "arm64") {
python -m pip install -r test/python/requirements.txt
}
if ("$(ep)" -eq "cuda") {
$env:CUDA_PATH = '$(Build.Repository.LocalPath)\cuda_sdk\v$(cuda_version)'
$env:PATH = "$env:CUDA_PATH\bin;$env:CUDA_PATH\extras\CUPTI\lib64;$env:PATH"
Write-Host $env:PATH
python -m pip install -r test/python/cuda/torch/requirements.txt
python -m pip install -r test/python/cuda/ort/requirements.txt
}
elseif ("$(ep)" -eq "directml") {
python -m pip install -r test/python/directml/torch/requirements.txt
python -m pip install -r test/python/directml/ort/requirements.txt
}
elseif ("$(arch)" -eq "arm64") {
python -m pip install numpy<2
python -m pip install onnxruntime-qnn==1.20.0
}
else {
python -m pip install -r test/python/cpu/torch/requirements.txt
python -m pip install -r test/python/cpu/ort/requirements.txt
}
cd examples\python
python -m pip install --no-index --find-links=$(Build.BinariesDirectory)/wheel $(pip_package_name)
python model-generate.py -m .\models\$(prebuild_phi3_mini_model_folder) --min_length 25 --max_length 50 --batch_size_for_cuda_graph 3 --verbose
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- template: steps/python-validation-step.yml
parameters:
PythonScriptFolder: "examples/python"
PythonScriptName: "model-generate.py"
LocalFolder: 'phi3-mini'
ModelFolder: $(prebuild_phi3_mini_model_folder)

- template: steps/utils/download-huggingface-model.yml
parameters:
HuggingFaceRepo: 'microsoft/Phi-3.5-vision-instruct-onnx'
LocalFolder: 'phi3.5-vision'
RepoFolder: $(prebuild_phi3_5_vision_model_folder)
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/python'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- template: steps/python-validation-step.yml
parameters:
PythonScriptFolder: "examples/python"
PythonScriptName: "phi3v.py"
LocalFolder: 'phi3.5-vision'
ModelFolder: $(prebuild_phi3_5_vision_model_folder)

- template: steps/compliant-and-cleanup-step.yml
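The Python validation job follows the same pattern: the inline CUDA-in-Docker, CPU, and Windows script blocks are collapsed into a steps/python-validation-step.yml template, called once to run model-generate.py against the Phi-3 mini model and once to run phi3v.py against the Phi-3.5 vision model. Mirroring the sketch above, and again inferring only from the call sites, its parameter block presumably resembles the following (hypothetical, not shown in this diff):

# Hypothetical sketch of steps/python-validation-step.yml; inferred from the call sites only.
parameters:
- name: PythonScriptFolder  # e.g. examples/python
  type: string
- name: PythonScriptName    # e.g. model-generate.py or phi3v.py
  type: string
- name: LocalFolder         # e.g. phi3-mini or phi3.5-vision
  type: string
- name: ModelFolder         # e.g. $(prebuild_phi3_5_vision_model_folder)
  type: string
# (steps that install the built wheel and run the script would follow)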