[ci-skip][IE TESTS] removing bathFlag
antonzaycev96 committed Jun 29, 2020
1 parent bdb8f4a commit 7a8d9a0
Showing 2 changed files with 2 additions and 6 deletions.
@@ -41,7 +41,7 @@ void LayerTestsCommon::Compare(const std::vector<std::uint8_t> &expected, const
     const auto &precision = actual->getTensorDesc().getPrecision();
     auto resize = 1;
     // With dynamic batch, the compared size has to be rescaled
-    if (DynamicBathFlag) {
+    if (configuration.count(InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED)) {
         resize = actual->getTensorDesc().getDims()[0];
     }
     const auto &size = (actual->size() * bathSize / resize);
@@ -62,9 +62,6 @@ void LayerTestsCommon::Compare(const std::vector<std::uint8_t> &expected, const
 void LayerTestsCommon::ConfigurePlugin() {
     if (!configuration.empty()) {
         core->SetConfig(configuration, targetDevice);
-        if (configuration.count(InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED)) {
-            DynamicBathFlag = true;
-        }
     }
 }

@@ -104,7 +101,7 @@ void LayerTestsCommon::Infer() {
         inferRequest.SetBlob(info->name(), blob);
         inputs.push_back(blob);
     }
-    if (DynamicBathFlag) {
+    if (configuration.count(InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED)) {
         bathSize = cnnNetwork.getInputsInfo().begin()->second->getTensorDesc().getDims()[0] / 2;
         inferRequest.SetBatch(bathSize);
     }
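For reference, a minimal sketch (not part of this commit) of the lookup that now replaces the removed flag in Compare() and Infer(). The KEY_DYN_BATCH_ENABLED key and the YES value come from the Inference Engine plugin config API (assuming the usual ie_plugin_config.hpp header); the DynBatchLookup struct and dynamicBatchRequested() helper are hypothetical names standing in for the test fixture.

#include <map>
#include <string>

#include <ie_plugin_config.hpp>  // InferenceEngine::PluginConfigParams

// Hypothetical stand-in for the test fixture's configuration handling.
struct DynBatchLookup {
    // The test's plugin configuration; requesting dynamic batch is just another map entry.
    std::map<std::string, std::string> configuration = {
        {InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED,
         InferenceEngine::PluginConfigParams::YES}};

    // The same check the commit now performs instead of reading DynamicBathFlag.
    bool dynamicBatchRequested() const {
        return configuration.count(
                   InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED) != 0;
    }
};

Querying the configuration map directly keeps a single source of truth, so the behaviour no longer depends on ConfigurePlugin() having mirrored the setting into a separate boolean.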
@@ -102,7 +102,6 @@ class LayerTestsCommon : public CommonTestUtils::TestsCommon {
     std::vector<InferenceEngine::Blob::Ptr> inputs;
     float threshold;
     InferenceEngine::CNNNetwork cnnNetwork;
-    bool DynamicBathFlag = false;  // check for dynamic batch
     int bathSize = 1;  // dynamic batch size (the first dimension in inputShapes must be > 1)
     virtual void Validate();

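A hedged end-to-end sketch of the dynamic-batch flow the Infer() hunk above follows, assuming a device that supports dynamic batching; the "CPU" device name and "model.xml" path are placeholders, while Core::SetConfig, ReadNetwork, LoadNetwork, CreateInferRequest, and InferRequest::SetBatch are the Inference Engine calls the test utilities rely on.

#include <map>
#include <string>

#include <inference_engine.hpp>   // InferenceEngine::Core, CNNNetwork, InferRequest
#include <ie_plugin_config.hpp>   // InferenceEngine::PluginConfigParams

int main() {
    InferenceEngine::Core core;

    // Enable dynamic batching on the (placeholder) target device.
    const std::map<std::string, std::string> config = {
        {InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED,
         InferenceEngine::PluginConfigParams::YES}};
    core.SetConfig(config, "CPU");

    InferenceEngine::CNNNetwork network = core.ReadNetwork("model.xml");  // placeholder model
    auto executableNetwork = core.LoadNetwork(network, "CPU");
    auto inferRequest = executableNetwork.CreateInferRequest();

    // As in the test above: run on half of the maximum batch baked into the input shape.
    const auto maxBatch =
        network.getInputsInfo().begin()->second->getTensorDesc().getDims()[0];
    inferRequest.SetBatch(static_cast<int>(maxBatch / 2));

    // (Input blobs would be filled here in a real test.)
    inferRequest.Infer();
    return 0;
}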
