Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[API] Multiple report file parsing support #439

Open
wants to merge 5 commits into
base: tvm
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 69 additions & 11 deletions python/heterocl/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
import time
import xmltodict
import pandas as pd
# Support for graphical display of the report
#import matplotlib.pyplot as plt
from .report_config import RptSetup
from tabulate import tabulate
from .schedule import Stage
Expand Down Expand Up @@ -57,6 +55,10 @@ class Displayer(object):

get_max(col)
Sort the latency in a decreasing order for specific latency category.

add_fields(val)
Dictionary representation of the Displayer object's member of
report files.

display(loops=None, level=None, cols=None)
Display the report table with appropriate query arguments.
Expand Down Expand Up @@ -373,6 +375,31 @@ def get_max(self, col):
tup_lst = list(map(lambda x: (x[0], x[1], x[2].count('+')), tup_lst))
return list(reversed(sorted(tup_lst, key=lambda x: int(x[1]))))

def add_fields(self, val):
"""Append additional data present in separate report files.

Parameters
----------
val: dict
Dictionary representation of the Displayer object's member
of report files.

Returns
----------
None
"""
self._category = val['_category']
self._category_aux = val['_category_aux']
self._loop_name += val['_loop_name']
self._loop_name_aux += val['_loop_name_aux']
self._max_level = val['_max_level'] if val['_max_level'] > self._max_level else self._max_level

for key in self._category_aux:
try:
self._data[key] += val['_data'][key]
except:
self._data[key] = val['_data'][key]

def display(self, loops=None, level=None, cols=None):
"""Display the report file.

Expand Down Expand Up @@ -431,7 +458,7 @@ def display(self, loops=None, level=None, cols=None):
print('* Units in {}'.format(self.unit))
splt = df.loc[rows, cols].to_string().split("\n")
pd.set_option('max_colwidth', len(splt[0]) * 100)
return df.loc[rows, cols].to_string()
return df.loc[rows, cols].to_string()

def parse_js(path, print_flag=False):
js_file = os.path.join(path, "kernel/reports/lib/report_data.js")
Expand All @@ -456,17 +483,25 @@ def parse_js(path, print_flag=False):
def parse_xml(path, xml_path, prod_name, print_flag=False):
xml_file = os.path.join(path, xml_path)

# Collect files other than the main one.
p = xml_file.rsplit('/', 1)[0]
other_xml_file = []
for file in os.listdir(p):
if file.endswith("_csynth.xml") and file != "test_csynth.xml":
fpath = os.path.join(p, file)
other_xml_file.append(fpath)

# Display the general information in `test_csynth.xml`
if not os.path.isfile(xml_file):
raise RuntimeError("Cannot find {}, run csyn first".format(xml_file))
json_file = os.path.join(path,"report.json")
outfile = open(json_file, "w")
with open(xml_file, "r") as xml:
profile = xmltodict.parse(xml.read())["profile"]
json.dump(profile, outfile, indent=2)

config = RptSetup(profile, prod_name)
config.eval_members()

res = {}
res["HLS Version"] = config.prod_name + " " + config.version
res["Product family"] = config.prod_family
Expand Down Expand Up @@ -504,17 +539,40 @@ def parse_xml(path, xml_path, prod_name, print_flag=False):
tablestr.insert(5, splitline)
table = '\n'.join(tablestr)

# Latency information extraction
clock_unit = config.performance_unit

# Parse latency information in the main report file (if it exists)
summary = config.loop_latency
out_info_table = Displayer(clock_unit)
try:
out_info_table.init_table(summary)
out_info_table.collect_data(summary)
except:
pass

# Latency information extraction
for lat_xml_file in other_xml_file:
with open(lat_xml_file, "r") as xml:
profile = xmltodict.parse(xml.read())["profile"]

config = RptSetup(profile, prod_name)
config.eval_members()

summary = config.loop_latency

info_table = Displayer(clock_unit)
info_table.init_table(summary)
info_table.collect_data(summary)
try:
info_table = Displayer(clock_unit)
info_table.init_table(summary)
info_table.collect_data(summary)
res = vars(info_table)
out_info_table.add_fields(res)
except:
print("Report for issue")
pass

if print_flag:
print(table)
return info_table
print(table)
return out_info_table

def report_stats(target, folder):
path = folder
Expand Down
3 changes: 1 addition & 2 deletions python/heterocl/report_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,7 @@ def _lookup(self, keys):
try:
return reduce(operator.getitem, keys, self.profile)
except KeyError:
print("Invalid key")
raise
pass

def eval_members(self):
"""Initialize each attribute to appropriate values.
Expand Down
110 changes: 106 additions & 4 deletions tests/test_hls_report.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import heterocl as hcl
import numpy as np
import os
import re
import json
import xmltodict
Expand Down Expand Up @@ -92,6 +93,33 @@ def spam_filter():
pass
# END TODO

def stages():
    """Build a four-stage (B, D, E, F) dataflow kernel, synthesize it with
    Vivado HLS in csyn mode, run it on zeroed 32x32 inputs, and return the
    synthesis report."""
    in_a = hcl.placeholder((32, 32), "A")
    in_c = hcl.placeholder((32, 32), "C")

    def kernel(A, C):
        B = hcl.compute(A.shape, lambda i, j: A[i, j] + 1, "B")
        D = hcl.compute(A.shape, lambda i, j: B[i, j] + 1, "D")
        E = hcl.compute(A.shape, lambda i, j: C[i, j] + 1, "E")
        F = hcl.compute(A.shape, lambda i, j: D[i, j] + E[i, j], "F")
        return F

    target = hcl.Platform.xilinx_zc706
    target.config(compiler="vivado_hls", mode="csyn", project="stages-tvm.prj")

    sched = hcl.create_schedule([in_a, in_c], kernel)
    # Stream intermediate stages to their consumers.
    sched.to(kernel.B, sched[kernel.D])
    sched.to(kernel.D, sched[kernel.F])
    sched.to(kernel.E, sched[kernel.F])
    mod = hcl.build(sched, target=target)

    # Two input buffers plus one output buffer, all zero-initialized.
    buffers = [hcl.asarray(np.zeros((32, 32))) for _ in range(3)]
    mod(*buffers)
    return mod.report()

def refine(res_tbl):
lst = res_tbl.split("\n")
pattern = re.compile(r'\s\s+')
Expand Down Expand Up @@ -119,11 +147,33 @@ def get_rpt(config):
with open(xml_file, "r") as xml:
profile = xmltodict.parse(xml.read())["profile"]
clock_unit = profile["PerformanceEstimates"]["SummaryOfOverallLatency"]["unit"]
summary = profile["PerformanceEstimates"]["SummaryOfLoopLatency"]


rpt = hcl.report.Displayer(clock_unit)
rpt.init_table(summary)
rpt.collect_data(summary)
try:
summary = profile["PerformanceEstimates"]["SummaryOfLoopLatency"]
rpt.init_table(summary)
rpt.collect_data(summary)
except:
pass

path = config['algorithm']['data_path']
if path:
other_rpt = []
path = str(pathlib.Path(__file__).parent.absolute()) + path
for file in os.listdir(path):
if file.endswith("_csynth.xml") and file != "test_csynth.xml":
fpath = os.path.join(path, file)
other_rpt.append(fpath)

for files in other_rpt:
with open(files, "r") as xml:
profile = xmltodict.parse(xml.read())["profile"]
summary = profile["PerformanceEstimates"]["SummaryOfLoopLatency"]
rpt_inner = hcl.report.Displayer(clock_unit)
rpt_inner.init_table(summary)
rpt_inner.collect_data(summary)
res = vars(rpt_inner)
rpt.add_fields(res)
return rpt

def _test_rpt(config):
Expand Down Expand Up @@ -212,6 +262,7 @@ def test_knn_digitrec(vhls):
'has_algorithm' : 1,
'algorithm' : {
'report_path' : '/test_report_data/digitrec_report.xml',
'data_path' : '',
'name' : 'knn_digitrec'
},
'get_max' : 'Latency',
Expand Down Expand Up @@ -253,6 +304,7 @@ def test_kmeans(vhls):
'has_algorithm' : 1,
'algorithm' : {
'report_path' : '/test_report_data/kmeans_report.xml',
'data_path' : '',
'name' : 'kmeans'
},
'get_max' : 'Absolute Time Latency',
Expand Down Expand Up @@ -294,6 +346,7 @@ def test_sobel(vhls):
'has_algorithm' : 0,
'algorithm' : {
'report_path' : '/test_report_data/sobel_report.xml',
'data_path' : '',
'name' : 'sobel'
},
'get_max' : 'Latency',
Expand Down Expand Up @@ -336,6 +389,7 @@ def test_sobel_partial(vhls):
'has_algorithm' : 0,
'algorithm' : {
'report_path' : '/test_report_data/sobel_report_partial.xml',
'data_path' : '',
'name' : 'sobel_partial'
},
'get_max' : 'Latency',
Expand Down Expand Up @@ -378,6 +432,7 @@ def test_canny(vhls):
'has_algorithm' : 0,
'algorithm' : {
'report_path' : '/test_report_data/canny_report.xml',
'data_path' : '',
'name' : 'canny'
},
'get_max' : 'Max Latency',
Expand Down Expand Up @@ -421,6 +476,7 @@ def test_spam_filter(vhls):
'has_algorithm' : 0,
'algorithm' : {
'report_path' : '/test_report_data/spam_filter_report.xml',
'data_path' : '',
'name' : 'spam_filter'
},
'get_max' : 'Latency',
Expand Down Expand Up @@ -457,10 +513,56 @@ def test_spam_filter(vhls):
}
_test_rpt(config)

def test_multi_rpt(vhls):
    """Exercise report parsing when latency data is spread across
    multiple ``*_csynth.xml`` files in one report directory."""
    # `report_path` can also be switched into
    # `/test_report_data/multi_report.xml`
    algorithm = {
        'report_path': '/test_report_data/stages_report/test_csynth.xml',
        'data_path': '/test_report_data/stages_report/',
        'name': 'stages',
    }
    config = {
        'vhls': vhls,
        'has_algorithm': 0,
        'algorithm': algorithm,
        'get_max': 'Latency',
        'col': 'Category',
        'info': 'NoQuery',
        'loop_query': {
            'query': ['B', 'F'],
            'name': 'LoopQuery',
        },
        'column_query': {
            'query': ['Trip Count', 'Latency', 'Iteration Latency',
                      'Pipeline II', 'Pipeline Depth'],
            'name': 'ColumnQuery',
        },
        'level_query': {
            'val': 0,
            'name': 'LevelQuery',
        },
        'level_out_of_bound': {
            'val': [5, -2],
            'name': 'LevelQueryOOB',
        },
        'multi_query': {
            'row_query': ['D'],
            'level_query': 1,
            'name': 'MultiQuery',
        },
        'all_query': {
            'row_query': ['B', 'E'],
            'col_query': ['Latency'],
            'level_query': 0,
            'name': 'AllQuery',
        },
    }
    _test_rpt(config)

if __name__ == '__main__':
    # Run every report test directly (vhls=False: parse canned XML
    # fixtures instead of invoking Vivado HLS).
    for case in (test_knn_digitrec, test_kmeans, test_sobel,
                 test_sobel_partial, test_canny, test_spam_filter,
                 test_multi_rpt):
        case(False)
11 changes: 11 additions & 0 deletions tests/test_report_data/expected.json
Original file line number Diff line number Diff line change
Expand Up @@ -64,5 +64,16 @@
"LevelQueryOOB" : ["Trip Count, Latency Iteration Latency Pipeline II Pipeline Depth", "outer_loop_x_outer_loop_y, 22500, 92610000, 4116, N/A, N/A", "+ data_local_x_outer_data_local_x_inner, 1024, 1026, N/A, 1, 4", "+ dot_product_loop_x_outer1_dot_product_loop_x_inner1, 1024, 1027, N/A, 1, 5", "+ grad_x_outer2_grad_x_inner2, 1024, 1026, N/A, 1, 4", "+ update_param_loop_x_outer3_update_param_loop_x_inner3, 1024, 1026, N/A, 1, 4"],
"MultiQuery" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "+ update_param_loop_x_outer3_update_param_loop_x_inner3, 1024, 1026, N/A, 1, 4"],
"AllQuery" : ["Latency", "+ dot_product_loop_x_outer1_dot_product_loop_x_inner1, 1027"]
},
"stages" : {
"GetMax" : {"E_i2": {"3136": 0}, "B_i": {"3136": 0}, "F_i3": {"2112": 0}, "D_i1": {"2112": 0}, "E_j2": {"96": 1}, "B_j": {"96": 1}, "F_j3": {"64": 1}, "D_j1": {"64": 1}},
"Category" : "Trip Count Latency Iteration Latency Pipeline II Pipeline Depth",
"NoQuery" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "B_i, 32, 3136, 98, N/A, N/A", "+ B_j, 32, 96, 3, N/A, N/A", "D_i1, 32, 2112, 66, N/A, N/A", "+ D_j1, 32, 64, 2, N/A, N/A", "E_i2, 32, 3136, 98, N/A, N/A", "+ E_j2, 32, 96, 3, N/A, N/A", "F_i3, 32, 2112, 66, N/A, N/A", "+ F_j3, 32, 64, 2, N/A, N/A"],
"LoopQuery" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "B_i, 32, 3136, 98, N/A, N/A", "+ B_j, 32, 96, 3, N/A, N/A", "F_i3, 32, 2112, 66, N/A, N/A", "+ F_j3, 32, 64, 2, N/A, N/A"],
"ColumnQuery" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "B_i, 32, 3136, 98, N/A, N/A", "+ B_j, 32, 96, 3, N/A, N/A", "D_i1, 32, 2112, 66, N/A, N/A", "+ D_j1, 32, 64, 2, N/A, N/A", "E_i2, 32, 3136, 98, N/A, N/A", "+ E_j2, 32, 96, 3, N/A, N/A", "F_i3, 32, 2112, 66, N/A, N/A", "+ F_j3, 32, 64, 2, N/A, N/A"],
"LevelQuery" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "B_i, 32, 3136, 98, N/A, N/A", "D_i1, 32, 2112, 66, N/A, N/A", "E_i2, 32, 3136, 98, N/A, N/A", "F_i3, 32, 2112, 66, N/A, N/A"],
"LevelQueryOOB" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "B_i, 32, 3136, 98, N/A, N/A", "+ B_j, 32, 96, 3, N/A, N/A", "D_i1, 32, 2112, 66, N/A, N/A", "+ D_j1, 32, 64, 2, N/A, N/A", "E_i2, 32, 3136, 98, N/A, N/A", "+ E_j2, 32, 96, 3, N/A, N/A", "F_i3, 32, 2112, 66, N/A, N/A", "+ F_j3, 32, 64, 2, N/A, N/A"],
"MultiQuery" : ["Trip Count Latency Iteration Latency Pipeline II Pipeline Depth", "D_i1, 32, 2112, 66, N/A, N/A", "+ D_j1, 32, 64, 2, N/A, N/A"],
"AllQuery" : ["Latency", "B_i, 3136", "E_i2, 3136"]
}
}
Loading