Skip to content

Commit

Permalink
Introduce INC 3.0 quantization API and port torch RTN into 3.0 (#1380)
Browse files Browse the repository at this point in the history
Signed-off-by: yiliu30 <[email protected]>
Signed-off-by: chensuyue <[email protected]>
  • Loading branch information
yiliu30 authored Nov 14, 2023
1 parent da3442d commit dc9328c
Show file tree
Hide file tree
Showing 23 changed files with 1,038 additions and 5 deletions.
4 changes: 4 additions & 0 deletions .azure-pipelines/model-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@ pr:
- .azure-pipelines/model-test.yml
- .azure-pipelines/scripts/models
- examples/tensorflow/oob_models/quantization/ptq
exclude:
- test
- neural_compressor/common
- neural_compressor/torch

pool: MODEL_PERF_TEST_TF

Expand Down
2 changes: 1 addition & 1 deletion .azure-pipelines/scripts/install_nc.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@

cd /neural-compressor
python -m pip install --no-cache-dir -r requirements.txt
python setup.py sdist bdist_wheel
python setup.py bdist_wheel
pip install dist/neural_compressor*.whl
pip list
130 changes: 130 additions & 0 deletions .azure-pipelines/scripts/ut/collect_log_3x.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
# Collect and compare unit-test coverage for the INC 3.x code base.
# Usage: collect_log_3x.sh <suffix> — ${1} selects the coverage rc file
# (e.g. "pt" -> coverage.pt). Expects to run inside the CI container with
# the repository mounted at /neural-compressor and the raw .coverage data
# files already downloaded into /neural-compressor/log_dir.
# NOTE(review): $BOLD_YELLOW/$BOLD_BLUE/$BOLD_RED/$BOLD_GREEN/$RESET used
# below presumably come from change_color.sh — confirm.
source /neural-compressor/.azure-pipelines/scripts/change_color.sh

pip install coverage
# Select the rc file that scopes reports to the 3.x packages.
export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/coverage.${1}
coverage_log="/neural-compressor/log_dir/coverage_log"
coverage_log_base="/neural-compressor/log_dir/coverage_log_base"
coverage_compare="/neural-compressor/log_dir/coverage_compare.html"
cd /neural-compressor/log_dir

# --- PR branch: render text/html/xml reports from the PR's .coverage data ---
$BOLD_YELLOW && echo "collect coverage for PR branch" && $RESET
cp ut_coverage_3x/.coverage /neural-compressor/
mkdir -p coverage_PR
cd /neural-compressor
coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log}
coverage html -d log_dir/coverage_PR/htmlcov --rcfile=${COVERAGE_RCFILE}
coverage xml -o log_dir/coverage_PR/coverage.xml --rcfile=${COVERAGE_RCFILE}
ls -l log_dir/coverage_PR/htmlcov


# --- Baseline: switch the checkout to master, reinstall the package, and
# render the same three reports from the baseline .coverage data ---
$BOLD_YELLOW && echo "collect coverage for baseline" && $RESET
cd /neural-compressor
git config --global --add safe.directory /neural-compressor
git fetch
git checkout master
echo y | pip uninstall neural-compressor
cd /neural-compressor/.azure-pipelines/scripts && bash install_nc.sh

coverage erase
cd /neural-compressor/log_dir
mkdir -p coverage_base
# Drop any leftover PR coverage data before copying the baseline data in.
rm -rf /neural-compressor/.coverage || true
cp ut_coverage_3x_baseline/.coverage /neural-compressor

cd /neural-compressor
coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log_base}
coverage html -d log_dir/coverage_base/htmlcov --rcfile=${COVERAGE_RCFILE}
coverage xml -o log_dir/coverage_base/coverage.xml --rcfile=${COVERAGE_RCFILE}
ls -l log_dir/coverage_base/htmlcov

get_coverage_data() {
  # Parse a Cobertura-style coverage XML report and print six whitespace-
  # separated fields on stdout:
  #   lines_covered lines_valid lines_rate branches_covered branches_valid branches_rate
  # Rates are percentages with three decimals; a rate is 0 when the matching
  # "valid" counter is 0 (avoids division by zero).
  #
  # Arguments:
  #   $1 - path to the coverage XML file
  # Exits with status 1 (error on stderr) if the file cannot be parsed.
  local coverage_xml="$1"

  # Round-trip the report through Python's XML parser: this validates the
  # file and yields one string to scan for the root element's attributes.
  # NOTE(review): the path is interpolated into the Python source, so it must
  # not contain quotes — CI paths here are fixed, but keep that in mind.
  local coverage_data
  coverage_data=$(python3 -c "import xml.etree.ElementTree as ET; root = ET.parse('$coverage_xml').getroot(); print(ET.tostring(root).decode())")
  if [[ -z "$coverage_data" ]]; then
    # Error to stderr so it is not captured by the caller's $( ) substitution.
    echo "Failed to get coverage data from $coverage_xml." >&2
    exit 1
  fi

  # Line coverage: pull the counters out of the serialized attributes.
  local lines_covered lines_valid lines_coverage
  lines_covered=$(echo "$coverage_data" | grep -o 'lines-covered="[0-9]*"' | cut -d '"' -f 2)
  lines_valid=$(echo "$coverage_data" | grep -o 'lines-valid="[0-9]*"' | cut -d '"' -f 2)
  # [[ with a quoted, defaulted value: safe even if grep found nothing.
  if [[ "${lines_valid:-0}" -eq 0 ]]; then
    lines_coverage=0
  else
    lines_coverage=$(awk "BEGIN {printf \"%.3f\", 100 * $lines_covered / $lines_valid}")
  fi

  # Branch coverage: same pattern as above.
  local branches_covered branches_valid branches_coverage
  branches_covered=$(echo "$coverage_data" | grep -o 'branches-covered="[0-9]*"' | cut -d '"' -f 2)
  branches_valid=$(echo "$coverage_data" | grep -o 'branches-valid="[0-9]*"' | cut -d '"' -f 2)
  if [[ "${branches_valid:-0}" -eq 0 ]]; then
    branches_coverage=0
  else
    branches_coverage=$(awk "BEGIN {printf \"%.3f\", 100 * $branches_covered / $branches_valid}")
  fi

  # Emit all six values on stdout for the caller to `read` into variables.
  echo "$lines_covered $lines_valid $lines_coverage $branches_covered $branches_valid $branches_coverage"
}

# --- Compare PR coverage against baseline and pass/fail the CI step ---
$BOLD_YELLOW && echo "compare coverage" && $RESET

# Parse both XML reports into the six counters emitted by get_coverage_data.
coverage_PR_xml="log_dir/coverage_PR/coverage.xml"
coverage_PR_data=$(get_coverage_data $coverage_PR_xml)
read lines_PR_covered lines_PR_valid coverage_PR_lines_rate branches_PR_covered branches_PR_valid coverage_PR_branches_rate <<<"$coverage_PR_data"

coverage_base_xml="log_dir/coverage_base/coverage.xml"
coverage_base_data=$(get_coverage_data $coverage_base_xml)
read lines_base_covered lines_base_valid coverage_base_lines_rate branches_base_covered branches_base_valid coverage_base_branches_rate <<<"$coverage_base_data"

$BOLD_BLUE && echo "PR lines coverage: $lines_PR_covered/$lines_PR_valid ($coverage_PR_lines_rate%)" && $RESET
$BOLD_BLUE && echo "PR branches coverage: $branches_PR_covered/$branches_PR_valid ($coverage_PR_branches_rate%)" && $RESET
$BOLD_BLUE && echo "BASE lines coverage: $lines_base_covered/$lines_base_valid ($coverage_base_lines_rate%)" && $RESET
$BOLD_BLUE && echo "BASE branches coverage: $branches_base_covered/$branches_base_valid ($coverage_base_branches_rate%)" && $RESET

# Remove raw data files so only the rendered reports are uploaded.
$BOLD_YELLOW && echo "clear upload path" && $RESET
rm -fr log_dir/coverage_PR/.coverage*
rm -fr log_dir/coverage_base/.coverage*
rm -fr log_dir/ut-coverage-*

# Names of the metrics that regressed ("lines" and/or "branches").
declare -a fail_items=()

# A metric fails when the PR rate is more than 0.05 points below baseline
# (bc prints 1 when the comparison holds, which (( )) treats as true).
if (( $(bc -l <<< "${coverage_PR_lines_rate}+0.05 < ${coverage_base_lines_rate}") )); then
fail_items+=("lines")
fi
if (( $(bc -l <<< "${coverage_PR_branches_rate}+0.05 < ${coverage_base_branches_rate}") )); then
fail_items+=("branches")
fi

if [[ ${#fail_items[@]} -ne 0 ]]; then
# NOTE(review): fail_items_str is built but never read below — confirm
# whether it is still needed.
fail_items_str=$(
IFS=', '
echo "${fail_items[*]}"
)
# Report the size of each regression (negative value = decrease).
for item in "${fail_items[@]}"; do
case "$item" in
lines)
decrease=$(echo $(printf "%.3f" $(echo "$coverage_PR_lines_rate - $coverage_base_lines_rate" | bc -l)))
;;
branches)
decrease=$(echo $(printf "%.3f" $(echo "$coverage_PR_branches_rate - $coverage_base_branches_rate" | bc -l)))
;;
*)
echo "Unknown item: $item"
continue
;;
esac
$BOLD_RED && echo "Unit Test failed with ${item} coverage decrease ${decrease}%" && $RESET
done
# Produce the side-by-side comparison report, then fail the pipeline step.
$BOLD_RED && echo "compare coverage to give detail info" && $RESET
bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
exit 1
else
$BOLD_GREEN && echo "Unit Test success with coverage lines: ${coverage_PR_lines_rate}%, branches: ${coverage_PR_branches_rate}%" && $RESET
$BOLD_GREEN && echo "compare coverage to give detail info" && $RESET
bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
fi
2 changes: 2 additions & 0 deletions .azure-pipelines/scripts/ut/coverage.file
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ omit =
*/neural_compressor/adaptor/tf_utils/quantize_graph/qdq/fuse_qdq_in.py
*/neural_compressor/adaptor/tf_utils/graph_rewriter/int8/freeze_value.py
*/neural_compressor/template/*
*/neural_compressor/common/*
*/neural_compressor/torch/*
exclude_lines =
pragma: no cover
raise NotImplementedError
Expand Down
15 changes: 15 additions & 0 deletions .azure-pipelines/scripts/ut/coverage.pt
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Coverage.py configuration for the 3.x PyTorch unit-test run; selected via
# COVERAGE_RCFILE by .azure-pipelines/scripts/ut/run_3x_pt.sh.
[run]
# Measure branch coverage in addition to statement coverage.
branch = True

[report]
# Only the new 3.x packages count toward this report; the patterns below
# under exclude_lines are never counted as missed coverage.
include =
*/neural_compressor/common/*
*/neural_compressor/torch/*
exclude_lines =
pragma: no cover
raise NotImplementedError
raise TypeError
if self.device == "gpu":
if device == "gpu":
except ImportError:
except Exception as e:
35 changes: 35 additions & 0 deletions .azure-pipelines/scripts/ut/run_3x_pt.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#!/bin/bash
# Run the INC 3.x PyTorch unit tests under coverage and fail on any error.
# Expects the repository mounted at /neural-compressor inside the CI container.
python -c "import neural_compressor as nc;print(nc.version.__version__)"
test_case="run 3x Torch"
echo "${test_case}"

# install requirements
echo "set up UT env..."
pip install -r /neural-compressor/requirements_pt.txt
pip install coverage
pip install pytest
pip list

# Scope coverage collection to the 3.x packages via coverage.pt.
export COVERAGE_RCFILE=/neural-compressor/.azure-pipelines/scripts/ut/coverage.pt
# Installed location of the neural_compressor package (coverage --source).
lpot_path=$(python -c 'import neural_compressor; import os; print(os.path.dirname(neural_compressor.__file__))')
cd /neural-compressor/test || exit 1
# Turn every test/3x/torch test file into one "coverage run --append" command.
find ./3x/torch/* -name "test*.py" | sed 's,\.\/,coverage run --source='"${lpot_path}"' --append ,g' | sed 's/$/ --verbose/'> run.sh

LOG_DIR=/neural-compressor/log_dir
mkdir -p ${LOG_DIR}
ut_log_name=${LOG_DIR}/ut_3x_pt.log

echo "cat run.sh..."
# Sort for a deterministic execution order, then log the generated commands.
sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
# Preserve the combined coverage data for the later collect/compare stage.
cp .coverage ${LOG_DIR}/.coverage

echo "------UT end -------"

# Fail when the log shows failures/crashes/import errors, or no "OK" at all.
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
echo "UT finished successfully! "
1 change: 1 addition & 0 deletions .azure-pipelines/scripts/ut/run_basic_others.sh
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ sed -i '/ mixed_precision/d' run.sh
sed -i '/ distillation\//d' run.sh
sed -i '/ scheduler\//d' run.sh
sed -i '/ nas\//d' run.sh
sed -i '/ 3x\//d' run.sh

echo "copy model for dynas..."
mkdir -p .torch/ofa_nets || true
Expand Down
108 changes: 108 additions & 0 deletions .azure-pipelines/ut-3x-pt.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
# Azure DevOps pipeline: unit tests for the INC 3.x PyTorch code base.
# Runs only on PRs to master that touch the 3.x code, its tests, or setup files.
trigger: none

pr:
autoCancel: true
drafts: false
branches:
include:
- master
paths:
include:
- neural_compressor/common
- neural_compressor/torch
- test/3x/torch
- setup.py
- requirements.txt
- requirements_pt.txt
- .azure-pipelines/scripts/ut

pool: ICX-16C

variables:
IMAGE_NAME: "neural-compressor"
IMAGE_TAG: "py38"
UPLOAD_PATH: $(Build.SourcesDirectory)/log_dir
DOWNLOAD_PATH: $(Build.SourcesDirectory)/log_dir
ARTIFACT_NAME: "UT_coverage_report_3x_pt"
REPO: $(Build.Repository.Uri)

stages:
# Run the 3.x Torch unit tests on the PR branch and upload coverage data.
- stage: Torch
displayName: Unit Test 3x Torch
dependsOn: []
jobs:
- job:
displayName: Unit Test 3x Torch
steps:
- template: template/ut-template.yml
parameters:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_3x_pt"
uploadPath: $(UPLOAD_PATH)
utArtifact: "ut_coverage_3x"


# Run the same tests against a fresh clone (baseline) for comparison.
- stage: Torch_baseline
displayName: Unit Test 3x Torch baseline
dependsOn: []
jobs:
- job:
displayName: Unit Test 3x Torch baseline
steps:
- template: template/ut-template.yml
parameters:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_3x_pt"
uploadPath: $(UPLOAD_PATH)
utArtifact: "ut_coverage_3x_baseline"
repo: $(REPO)

# Combine both coverage runs and fail on a regression
# (see .azure-pipelines/scripts/ut/collect_log_3x.sh).
- stage: Coverage
displayName: "Coverage Combine"
pool:
vmImage: "ubuntu-latest"
dependsOn: [Torch, Torch_baseline]
jobs:
- job: CollectDatafiles
steps:
# Build the devel image only when it is not already cached on the agent.
- script: |
if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
fi
docker images | grep -i ${IMAGE_NAME}
if [[ $? -ne 0 ]]; then
echo "NO Such Repo"
exit 1
fi
displayName: "Build develop docker image"
# NOTE(review): empty artifact name presumably downloads every artifact
# from the current run (both coverage stages) — confirm.
- task: DownloadPipelineArtifact@2
inputs:
artifact:
path: $(DOWNLOAD_PATH)

# Run the coverage collect/compare script inside the devel container.
- script: |
echo "--- create container ---"
docker run -d -it --name="collectLogs" -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor ${IMAGE_NAME}:${IMAGE_TAG} /bin/bash
echo "--- docker ps ---"
docker ps
echo "--- collect logs ---"
docker exec collectLogs /bin/bash +x -c "cd /neural-compressor/.azure-pipelines/scripts \
&& bash install_nc.sh \
&& bash ut/collect_log_3x.sh pt"
displayName: "collect logs"
# Upload the reports even when the compare step failed the job.
- task: PublishPipelineArtifact@1
condition: succeededOrFailed()
inputs:
targetPath: $(UPLOAD_PATH)
artifact: $(ARTIFACT_NAME)
publishLocation: "pipeline"

# Always scrub the shared workspace inside the container afterwards.
- task: Bash@3
condition: always()
inputs:
targetType: "inline"
script: |
docker exec collectLogs bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
displayName: "Docker clean up"
3 changes: 3 additions & 0 deletions .azure-pipelines/ut-basic-no-cover.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@ pr:
- .azure-pipelines/scripts/ut
exclude:
- test/neural_coder
- test/3x
- neural_compressor/common
- neural_compressor/torch

pool: ICX-16C

Expand Down
3 changes: 3 additions & 0 deletions .azure-pipelines/ut-basic.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@ pr:
- .azure-pipelines/scripts/ut
exclude:
- test/neural_coder
- test/3x
- neural_compressor/common
- neural_compressor/torch

pool: ICX-16C

Expand Down
13 changes: 13 additions & 0 deletions neural_compressor/common/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Copyright (c) 2023 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Loading

0 comments on commit dc9328c

Please sign in to comment.