Add trap flow counter support #1868
Merged: prsunny merged 4 commits into sonic-net:master from Junchao-Mellanox:flow-counter-review on Nov 25, 2021
@@ -0,0 +1,283 @@ (new file):
#!/usr/bin/env python3

import argparse
import os
import _pickle as pickle
import sys

from natsort import natsorted
from tabulate import tabulate

# mock the redis for unit test purposes #
try:
    if os.environ["UTILITIES_UNIT_TESTING"] == "2":
        modules_path = os.path.join(os.path.dirname(__file__), "..")
        tests_path = os.path.join(modules_path, "tests")
        sys.path.insert(0, modules_path)
        sys.path.insert(0, tests_path)
        import mock_tables.dbconnector
        if os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] == "multi_asic":
            import mock_tables.mock_multi_asic
            mock_tables.dbconnector.load_namespace_config()

except KeyError:
    pass

import utilities_common.multi_asic as multi_asic_util
from utilities_common.netstat import format_number_with_comma, table_as_json, ns_diff, format_prate

# Flow counter meta data; new types of flow counters can extend this dictionary to reuse the existing logic
flow_counter_meta = {
    'trap': {
        'headers': ['Trap Name', 'Packets', 'Bytes', 'PPS'],
        'name_map': 'COUNTERS_TRAP_NAME_MAP',
    }
}
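# Illustrative only: per the comment above, a hypothetical new counter type could be
# registered by adding another entry to flow_counter_meta, for example:
#
#     'route': {
#         'headers': ['Route Pattern', 'Packets', 'Bytes', 'PPS'],
#         'name_map': 'COUNTERS_ROUTE_NAME_MAP',
#     }
#
# 'headers' drives the displayed columns and 'name_map' names the COUNTERS_DB map that
# translates entry names to counter OIDs. The 'route' entry and its names above are made
# up to show the extension point; they are not part of this change.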
flow_counters_fields = ['SAI_COUNTER_STAT_PACKETS', 'SAI_COUNTER_STAT_BYTES']

# Only do diff for 'Packets' and 'Bytes'
diff_column_positions = set([0, 1])

FLOW_COUNTER_TABLE_PREFIX = "COUNTERS:"
RATES_TABLE_PREFIX = 'RATES:'
PPS_FIELD = 'RX_PPS'
STATUS_NA = 'N/A'
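# Packet/byte statistics are read from COUNTERS_DB under "COUNTERS:<counter_oid>",
# and the RX_PPS rate under "RATES:<counter_oid>" (see _get_stats_from_db below).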


class FlowCounterStats(object):
    def __init__(self, args):
        self.db = None
        self.multi_asic = multi_asic_util.MultiAsic(namespace_option=args.namespace)
        self.args = args
        meta_data = flow_counter_meta[args.type]
        self.name_map = meta_data['name_map']
        self.headers = meta_data['headers']
        self.data_file = os.path.join('/tmp/{}-stats-{}'.format(args.type, os.getuid()))
        if self.args.delete and os.path.exists(self.data_file):
            os.remove(self.data_file)
        self.data = {}

    def show(self):
        """Show flow counter statistics
        """
        self._collect_and_diff()
        headers, table = self._prepare_show_data()
        self._print_data(headers, table)

    def _collect_and_diff(self):
        """Collect statistics from DB and diff against the old data, if any
        """
        self._collect()
        old_data = self._load()
        need_update_cache = self._diff(old_data, self.data)
        if need_update_cache:
            self._save(old_data)

    def _adjust_headers(self, headers):
        """Adjust table headers based on the platform
        Args:
            headers (list): Original headers
        Returns:
            headers (list): Headers with an 'ASIC ID' column if it is a multi-ASIC platform
        """
        return ['ASIC ID'] + headers if self.multi_asic.is_multi_asic else headers

    def _prepare_show_data(self):
        """Prepare headers and table data for output
        Returns:
            headers (list): Table headers
            table (list): Table data
        """
        table = []
        headers = self._adjust_headers(self.headers)

        for ns, stats in natsorted(self.data.items()):
            if self.args.namespace is not None and self.args.namespace != ns:
                continue
            for name, values in natsorted(stats.items()):
                if self.multi_asic.is_multi_asic:
                    row = [ns]
                else:
                    row = []
                row.extend([name, format_number_with_comma(values[0]), format_number_with_comma(values[1]), format_prate(values[2])])
                table.append(row)

        return headers, table

    def _print_data(self, headers, table):
        """Print statistics based on the output format
        Args:
            headers (list): Table headers
            table (list): Table data
        """
        if self.args.json:
            print(table_as_json(table, headers))
        else:
            print(tabulate(table, headers, tablefmt='simple', stralign='right'))

    def clear(self):
        """Clear flow counter statistics. This function does not clear data from the ASIC. Instead, it saves the current
        flow counter statistics to a file. When the user issues a show command after clear, it does a diff between the
        new data and the saved data.
        """
        self._collect()
        self._save(self.data)
        print('Flow Counters were successfully cleared')

    @multi_asic_util.run_on_multi_asic
    def _collect(self):
        """Collect flow counter statistics from DB. This function is called in a multi-ASIC context.
        """
        self.data.update(self._get_stats_from_db())

    def _get_stats_from_db(self):
        """Get flow counter statistics from DB.
        Returns:
            dict: A dictionary. E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
        """
        ns = self.multi_asic.current_namespace
        name_map = self.db.get_all(self.db.COUNTERS_DB, self.name_map)
        data = {ns: {}}
        if not name_map:
            return data

        for name, counter_oid in name_map.items():
            values = self._get_stats_value(counter_oid)

            full_table_id = RATES_TABLE_PREFIX + counter_oid
            counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, PPS_FIELD)
            values.append(STATUS_NA if counter_data is None else counter_data)
            values.append(counter_oid)
            data[ns][name] = values
        return data

    def _get_stats_value(self, counter_oid):
        """Get statistics values from the COUNTERS_DB COUNTERS table
        Args:
            counter_oid (string): OID of a generic counter
        Returns:
            values (list): A list of statistics values
        """
        values = []
        full_table_id = FLOW_COUNTER_TABLE_PREFIX + counter_oid
        for field in flow_counters_fields:
            counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, field)
            values.append(STATUS_NA if counter_data is None else counter_data)
        return values

    def _save(self, data):
        """Save flow counter statistics to a file
        """
        try:
            if os.path.exists(self.data_file):
                os.remove(self.data_file)

            with open(self.data_file, 'wb') as f:
                pickle.dump(data, f)
        except IOError as e:
            print('Failed to save statistic - {}'.format(repr(e)))

    def _load(self):
        """Load flow counter statistics from a file
        Returns:
            dict: A dictionary. E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
        """
        if not os.path.exists(self.data_file):
            return None

        try:
            with open(self.data_file, 'rb') as f:
                data = pickle.load(f)
        except IOError as e:
            print('Failed to load statistic - {}'.format(repr(e)))
            return None

        return data

    def _diff(self, old_data, new_data):
        """Do a diff between the new data and the old data.
        Args:
            old_data (dict): E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
            new_data (dict): E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
        Returns:
            bool: True if the cache needs to be updated
        """
        if not old_data:
            return False

        need_update_cache = False
        for ns, stats in new_data.items():
            if ns not in old_data:
                continue
            old_stats = old_data[ns]
            for name, values in stats.items():
                if name not in old_stats:
                    continue

                old_values = old_stats[name]
                if values[-1] != old_values[-1]:
                    # Unequal counter OIDs mean the trap was removed and added again. Removing a trap causes
                    # the statistics to restart from 0. To avoid negative values here, do not diff when the
                    # counter OID has changed.
                    old_values[-1] = values[-1]
                    for i in diff_column_positions:
                        old_values[i] = 0
                        values[i] = ns_diff(values[i], old_values[i])
                    need_update_cache = True
                    continue

                has_negative_diff = False
                for i in diff_column_positions:
                    # If any diff has a negative value, set all cached counter values to 0 and update the cache
                    if values[i] < old_values[i]:
                        has_negative_diff = True
                        break

                if has_negative_diff:
                    for i in diff_column_positions:
                        old_values[i] = 0
                        values[i] = ns_diff(values[i], old_values[i])
                    need_update_cache = True
                    continue

                for i in diff_column_positions:
                    values[i] = ns_diff(values[i], old_values[i])

        return need_update_cache


def main():
    parser = argparse.ArgumentParser(description='Display the flow counters',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog="""
Examples:
  flow_counters_stat -c -t trap
  flow_counters_stat -t trap
  flow_counters_stat -d -t trap
""")
    parser.add_argument('-c', '--clear', action='store_true', help='Copy & clear stats')
    parser.add_argument('-d', '--delete', action='store_true', help='Delete saved stats')
    parser.add_argument('-j', '--json', action='store_true', help='Display in JSON format')
    parser.add_argument('-n', '--namespace', default=None, help='Display flow counters for specific namespace')
    parser.add_argument('-t', '--type', required=True, choices=['trap'], help='Flow counters type')

    args = parser.parse_args()

    stats = FlowCounterStats(args)
    if args.clear:
        stats.clear()
    else:
        stats.show()


if __name__ == '__main__':
    main()
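For orientation, main() only parses the CLI flags into an args object and then dispatches to clear() or show(). A minimal sketch of driving the same class programmatically, for example from a unit test with the mocked DB enabled at the top of the script; the argparse.Namespace fields simply mirror the CLI options, and nothing below is captured from a real device:

    import argparse

    # mirrors "flow_counters_stat -t trap": no namespace filter, no cache delete, table output
    args = argparse.Namespace(type='trap', namespace=None, delete=False, json=False, clear=False)
    stats = FlowCounterStats(args)
    stats.show()     # prints the Trap Name / Packets / Bytes / PPS table
    stats.clear()    # snapshots current values so the next show() reports deltas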
Review comment: This is good logic to handle. However, what if the OID returned after the delete is the same as before? Should we rather check whether the diff is negative and treat that as the case where the trap got deleted?
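To make the question concrete, a hedged sketch of that case (all values invented): a trap is removed and re-added, the counter comes back with the same OID, and its hardware counters restart from zero:

    # snapshot saved by an earlier clear/show: [packets, bytes, rx_pps, counter_oid]
    old_values = ['1500', '90000', '10.0', 'oid:0x1500000000034f']
    # freshly collected values after the trap was re-created with the SAME counter OID
    new_values = ['40', '2500', '10.0', 'oid:0x1500000000034f']

Because new_values[-1] == old_values[-1], the OID-change branch in _diff() is skipped, so detecting this reset is left entirely to the negative-diff check that follows it.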