Add CLI parameters to test runner, build WinML in ARM and x86 CI #2479

Merged · 6 commits · Nov 27, 2019
2 changes: 1 addition & 1 deletion cmake/external/googletest
Submodule googletest updated 329 files
10 changes: 7 additions & 3 deletions cmake/winml_unittests.cmake
@@ -44,7 +44,7 @@ function(add_winml_test)
if (_UT_DEPENDS)
add_dependencies(${_UT_TARGET} ${_UT_DEPENDS})
endif()
target_link_libraries(${_UT_TARGET} PRIVATE ${_UT_LIBS} gtest_main windowsapp winml_lib_image ${onnxruntime_EXTERNAL_LIBRARIES})
target_link_libraries(${_UT_TARGET} PRIVATE ${_UT_LIBS} gtest windowsapp winml_lib_image ${onnxruntime_EXTERNAL_LIBRARIES})

add_test(NAME ${_UT_TARGET}
COMMAND ${_UT_TARGET}
@@ -66,11 +66,15 @@ add_winml_test(
)
target_precompiled_header(winml_test_api testPch.h)

file(GLOB winml_test_scenario_src CONFIGURE_DEPENDS "${WINML_TEST_SRC_DIR}/scenario/cppwinrt/*.cpp")
if (onnxruntime_USE_DML)
Contributor (commenting on onnxruntime_USE_DML):

just confirming - we won't run scenario tests without DML?

Contributor (author):

My understanding is that it's not possible in its current state, right? Scenario tests don't compile without DML.

I'll change the tests to skip compilation of tests that depend on DML when it's disabled (add some #ifdef), but I think this PR could be merged as-is.

Contributor:

sounds good

Contributor (author):

I changed it yesterday to build scenario tests except CustomOps.cpp but forgot to push the changes, pushed them now.

file(GLOB winml_test_scenario_src CONFIGURE_DEPENDS "${WINML_TEST_SRC_DIR}/scenario/cppwinrt/*.cpp")
else()
set(winml_test_scenario_src "${WINML_TEST_SRC_DIR}/scenario/cppwinrt/scenariotestscppwinrt.cpp")
endif()
add_winml_test(
TARGET winml_test_scenario
SOURCES ${winml_test_scenario_src}
LIBS winml_test_common onnxruntime_providers_dml
LIBS winml_test_common
DEPENDS winml_api
)
target_precompiled_header(winml_test_scenario testPch.h)
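
A side note on the review thread above: the author mentions a possible follow-up of guarding DML-dependent tests with an #ifdef instead of excluding whole source files in CMake. A minimal sketch of that idea, assuming a hypothetical USE_DML compile definition set by the build when onnxruntime_USE_DML is enabled (neither the definition nor the test below is part of this PR):

```cpp
// Hypothetical follow-up sketch (not part of this PR): compile a DML-only
// scenario test conditionally so the file still builds when DML is disabled.
#ifdef USE_DML  // assumed compile definition mirroring the CMake onnxruntime_USE_DML option
TEST_F(CustomOpsScenarioGpuTest, SomeDmlOnlyScenario)
{
    // DML-specific setup and assertions would go here.
    SUCCEED();
}
#endif  // USE_DML
```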
@@ -34,15 +34,15 @@ jobs:
displayName: 'Generate cmake config and build Debug'
inputs:
filename: '$(Build.BinariesDirectory)\packages\python\python.exe'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Debug --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Debug --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm --use_winml'
workingDirectory: "$(Build.BinariesDirectory)"
- task: CmdLine@1
displayName: 'Generate cmake config and build Release'
inputs:
filename: '$(Build.BinariesDirectory)\packages\python\python.exe'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Release --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Release --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm --use_winml'
workingDirectory: "$(Build.BinariesDirectory)"
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
condition: and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI'))
- template: templates/clean-agent-build-directory-step.yml
condition: and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI'))
- template: templates/clean-agent-build-directory-step.yml
@@ -34,15 +34,15 @@ jobs:
displayName: 'Generate cmake config and build Debug'
inputs:
filename: '$(Build.BinariesDirectory)\packages\python\python.exe'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Debug --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm64'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Debug --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm64 --use_winml'
workingDirectory: "$(Build.BinariesDirectory)"
- task: CmdLine@1
displayName: 'Generate cmake config and build Release'
inputs:
filename: '$(Build.BinariesDirectory)\packages\python\python.exe'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Release --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm64'
arguments: '$(Build.SourcesDirectory)\tools\ci_build\build.py --config Release --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --arm64 --use_winml'
workingDirectory: "$(Build.BinariesDirectory)"
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
condition: and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI'))
- template: templates/clean-agent-build-directory-step.yml
condition: and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI'))
- template: templates/clean-agent-build-directory-step.yml
2 changes: 1 addition & 1 deletion tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
@@ -4,7 +4,7 @@ jobs:
AgentPool : 'Win-CPU'
DoDebugBuild: 'true'
DoCompliance: 'false'
BuildCommand: '$(Build.SourcesDirectory)\tools\ci_build\build.py --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --ctest_path $(Build.BinariesDirectory)\cmake\bin\ctest.exe --use_tvm --use_automl --enable_pybind --use_mkldnn --use_openmp --use_dml --use_winml --build_shared_lib --build_csharp --enable_onnx_tests'
BuildCommand: '$(Build.SourcesDirectory)\tools\ci_build\build.py --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --ctest_path $(Build.BinariesDirectory)\cmake\bin\ctest.exe --use_tvm --use_automl --enable_pybind --use_mkldnn --use_openmp --use_winml --build_shared_lib --build_csharp --enable_onnx_tests'
JobName: 'Windows_CI_Dev'
DoNugetPack: 'false'
NuPackScript : ''
@@ -4,7 +4,7 @@ jobs:
AgentPool : 'Win-CPU'
DoDebugBuild: 'true'
DoCompliance: 'false'
BuildCommand: '$(Build.SourcesDirectory)\tools\ci_build\build.py --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --ctest_path $(Build.BinariesDirectory)\cmake\bin\ctest.exe --use_openmp --build_shared_lib --build_csharp --enable_onnx_tests --x86'
BuildCommand: '$(Build.SourcesDirectory)\tools\ci_build\build.py --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --cmake_path $(Build.BinariesDirectory)\cmake\bin\cmake.exe --ctest_path $(Build.BinariesDirectory)\cmake\bin\ctest.exe --use_openmp --use_winml --build_shared_lib --build_csharp --enable_onnx_tests --x86'
JobName: 'Windows_CI_Dev_x86'
DoNugetPack: 'false'
NuPackScript : ''
9 changes: 8 additions & 1 deletion winml/test/api/LearningModelAPITest.cpp
@@ -1,4 +1,5 @@
#include "testPch.h"

#include "APITest.h"

#include <winrt/Windows.Graphics.Imaging.h>
@@ -26,7 +27,13 @@ class LearningModelAPITest : public APITest
};

class LearningModelAPITestGpu : public LearningModelAPITest
{};
{
protected:
void SetUp() override
{
GPUTEST
}
};

TEST_F(LearningModelAPITest, CreateModelFromFilePath)
{
13 changes: 8 additions & 5 deletions winml/test/api/LearningModelBindingAPITest.cpp
@@ -1,4 +1,5 @@
#include "testPch.h"

#include "APITest.h"
#include "SqueezeNetValidator.h"

@@ -18,7 +19,13 @@ class LearningModelBindingAPITest : public APITest
{};

class LearningModelBindingAPITestGpu : public LearningModelBindingAPITest
{};
{
protected:
void SetUp() override
{
GPUTEST
}
};

TEST_F(LearningModelBindingAPITest, CpuSqueezeNet)
{
@@ -289,7 +296,6 @@ TEST_F(LearningModelBindingAPITest, ZipMapString)

TEST_F(LearningModelBindingAPITestGpu, GpuSqueezeNet)
{

std::string gpuInstance("GPU");
WinML::Engine::Test::ModelValidator::SqueezeNet(
gpuInstance,
@@ -299,7 +305,6 @@ TEST_F(LearningModelBindingAPITestGpu, GpuSqueezeNetEmptyOutputs)

TEST_F(LearningModelBindingAPITestGpu, GpuSqueezeNetEmptyOutputs)
{

std::string gpuInstance("GPU");
WinML::Engine::Test::ModelValidator::SqueezeNet(
gpuInstance,
@@ -311,7 +316,6 @@ TEST_F(LearningModelBindingAPITestGpu, GpuSqueezeNetUnboundOutputs)

TEST_F(LearningModelBindingAPITestGpu, GpuSqueezeNetUnboundOutputs)
{

std::string gpuInstance("GPU");
WinML::Engine::Test::ModelValidator::SqueezeNet(
gpuInstance,
@@ -357,7 +361,6 @@ TEST_F(LearningModelBindingAPITestGpu, ImageBindingDimensions)

TEST_F(LearningModelBindingAPITestGpu, VerifyInvalidBindExceptions)
{

EXPECT_NO_THROW(LoadModel(L"zipmap-int64.onnx"));

LearningModelSession session(m_model);
20 changes: 15 additions & 5 deletions winml/test/api/LearningModelSessionAPITest.cpp
@@ -20,10 +20,23 @@ class LearningModelSessionAPITests : public APITest
{};

class LearningModelSessionAPITestsGpu : public APITest
{}; // TODO create a constructor that calls GTEST_SKIP when GPU tests are disabled
{
protected:
void SetUp() override
{
GPUTEST
}
};

class LearningModelSessionAPITestsSkipEdgeCore : public LearningModelSessionAPITestsGpu
{}; // TODO create a constructor that calls GTEST_SKIP when on EdgeCore
{
protected:
void SetUp() override
{
LearningModelSessionAPITestsGpu::SetUp();
SKIP_EDGECORE
}
};

TEST_F(LearningModelSessionAPITests, CreateSessionDeviceDefault)
{
@@ -60,7 +73,6 @@ TEST_F(LearningModelSessionAPITests, CreateSessionWithModelLoadedFromStream)

TEST_F(LearningModelSessionAPITestsGpu, CreateSessionDeviceDirectX)
{

EXPECT_NO_THROW(LoadModel(L"model.onnx"));

EXPECT_NO_THROW(m_device = LearningModelDevice(LearningModelDeviceKind::DirectX));
@@ -69,7 +81,6 @@ TEST_F(LearningModelSessionAPITestsGpu, CreateSessionDeviceDirectX)

TEST_F(LearningModelSessionAPITestsGpu, CreateSessionDeviceDirectXHighPerformance)
{

EXPECT_NO_THROW(LoadModel(L"model.onnx"));

EXPECT_NO_THROW(m_device = LearningModelDevice(LearningModelDeviceKind::DirectXHighPerformance));
@@ -78,7 +89,6 @@ TEST_F(LearningModelSessionAPITestsGpu, CreateSessionDeviceDirectXHighPerformanc

TEST_F(LearningModelSessionAPITestsGpu, CreateSessionDeviceDirectXMinimumPower)
{

EXPECT_NO_THROW(LoadModel(L"model.onnx"));

EXPECT_NO_THROW(m_device = LearningModelDevice(LearningModelDeviceKind::DirectXMinPower));
52 changes: 52 additions & 0 deletions winml/test/common/main.cpp
@@ -0,0 +1,52 @@
#include <iostream>
#include <unordered_map>
#include <gtest/gtest.h>

#include "runtimeParameters.h"

namespace RuntimeParameters
{
std::unordered_map<std::string, std::string> Parameters;
}

namespace
{
void usage(char **argv, int failedArgument)
{
std::cerr << "Unrecognized argument: " << argv[failedArgument] << "\n"
<< "Usage:\n\t"
<< argv[0] << " [/p:parameterName=parameterValue ...]\n";
}

bool parseArgument(const std::string& argument)
{
if (argument.rfind("/p:", 0) == 0)
{
// Parse argument in the form of /p:parameterName=parameterValue
auto separatorIndex = argument.find('=');
if (separatorIndex == std::string::npos || separatorIndex == 3)
{
return false;
}
auto parameterName = argument.substr(3, separatorIndex - 3);
auto parameterValue = argument.substr(separatorIndex + 1);
RuntimeParameters::Parameters[parameterName] = parameterValue;
return true;
}
return false;
}
}

int main(int argc, char **argv)
{
::testing::InitGoogleTest(&argc, argv);
for (int i = 1; i < argc; i++)
{
if (!parseArgument(argv[i]))
{
usage(argv, i);
return -1;
}
}
return RUN_ALL_TESTS();
}
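
For context on how these parameters are meant to be consumed, here is a minimal sketch of a test reading a value from RuntimeParameters::Parameters; the parameter name modelsDirOverride, the helper, and the test are illustrative only and not part of this PR:

```cpp
// Illustrative only: reads a hypothetical "modelsDirOverride" parameter passed as, e.g.,
//   winml_test_api.exe /p:modelsDirOverride=C:\models
// and falls back to a default directory when the parameter is absent.
#include <string>
#include <unordered_map>

#include <gtest/gtest.h>

#include "runtimeParameters.h"

static std::string ModelsDirectory()
{
    auto it = RuntimeParameters::Parameters.find("modelsDirOverride");
    return it != RuntimeParameters::Parameters.end() ? it->second : std::string("testData");
}

TEST(RuntimeParametersExample, ResolvesModelsDirectory)
{
    // Resolves to the CLI override when supplied, otherwise to the default.
    EXPECT_FALSE(ModelsDirectory().empty());
}
```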
9 changes: 9 additions & 0 deletions winml/test/common/runtimeParameters.h
@@ -0,0 +1,9 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

#pragma once
namespace RuntimeParameters
{
// Runtime parameters passed through CLI arguments
extern std::unordered_map<std::string, std::string> Parameters;
}
21 changes: 20 additions & 1 deletion winml/test/common/std.h
@@ -19,6 +19,7 @@
#include <utility>
#include <vector>

#include <gtest/gtest.h>

// IUnknown must be declared before winrt/base.h is included to light up support for native COM
// interfaces with C++/WinRT types (e.g. winrt::com_ptr<ITensorNative>).
@@ -30,6 +31,8 @@
// WinML
#include "Windows.AI.MachineLearning.Native.h"

#include "runtimeParameters.h"

#define EXPECT_THROW_SPECIFIC(statement, exception, condition) \
EXPECT_THROW( \
try { \
@@ -42,10 +45,26 @@

// For old versions of gtest without GTEST_SKIP, stream the message and return success instead
#ifndef GTEST_SKIP
#define GTEST_SKIP(message) return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)
#define GTEST_SKIP_(message) \
return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)
#define GTEST_SKIP GTEST_SKIP_("")
#endif

#ifndef INSTANTIATE_TEST_SUITE_P
// Use the old name, removed in newer versions of googletest
#define INSTANTIATE_TEST_SUITE_P INSTANTIATE_TEST_CASE_P
#endif

#define GPUTEST \
if (auto noGpuTests = RuntimeParameters::Parameters.find("noGPUtests"); \
noGpuTests != RuntimeParameters::Parameters.end() && noGpuTests->second != "0") \
{ \
GTEST_SKIP() << "GPU tests disabled"; \
}

#define SKIP_EDGECORE \
if (auto isEdgeCore = RuntimeParameters::Parameters.find("EdgeCore"); \
isEdgeCore != RuntimeParameters::Parameters.end() && isEdgeCore->second != "0") \
{ \
GTEST_SKIP() << "Test can't be run in EdgeCore"; \
}
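
For illustration, GPUTEST and SKIP_EDGECORE are plain statements, so they can also be placed directly inside a test body rather than in a fixture's SetUp; a small sketch (the test itself is hypothetical and assumes the common test header above is included):

```cpp
// Hypothetical example: this single test is skipped when GPU tests are disabled via
//   winml_test_api.exe /p:noGPUtests=1
TEST(StdHelpersExample, GpuOnlyCheck)
{
    GPUTEST  // expands to a conditional GTEST_SKIP() when the noGPUtests parameter is set

    // GPU-dependent assertions would go here.
    SUCCEED();
}
```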
9 changes: 6 additions & 3 deletions winml/test/scenario/cppwinrt/CustomOps.cpp
@@ -11,8 +11,8 @@
#include <winrt/Windows.Storage.Streams.h>
#include <MemoryBuffer.h>
#include <gsl/gsl>
// #include "dml/api/DirectML.h"
#include "CustomOperatorProvider.h"
#include "runtimeParameters.h"

// For custom operator and shape inferencing support
#include "core/providers/dml/DmlExecutionProvider/inc/MLOperatorAuthor.h"
@@ -41,6 +41,11 @@ class CustomOpsScenarioTest : public ::testing::Test

class CustomOpsScenarioGpuTest : public CustomOpsScenarioTest
{
protected:
void SetUp() override
{
GPUTEST
}
};

// Tests that the execution provider correctly fuses operators together when custom ops are involved.
@@ -173,8 +178,6 @@ struct LocalCustomOperatorProvider :
{
LocalCustomOperatorProvider()
{
using namespace OperatorHelper;

EXPECT_HRESULT_SUCCEEDED(MLCreateOperatorRegistry(m_registry.put()));
}
