Skip to content

Commit

Permalink
Add trap flow counter support
Browse files Browse the repository at this point in the history
  • Loading branch information
Junchao-Mellanox committed Sep 24, 2021
1 parent c0b9917 commit a35423e
Show file tree
Hide file tree
Showing 13 changed files with 528 additions and 3 deletions.
8 changes: 8 additions & 0 deletions clear/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -476,6 +476,14 @@ def statistics(db):
def remap_keys(mapping):
    """Convert a mapping into a list of ``{'key': k, 'value': v}`` records.

    Args:
        mapping (dict): any mapping whose items should be re-shaped.

    Returns:
        list: one dict per input item, preserving the mapping's iteration order.
    """
    # Parameter renamed from ``dict`` — the old name shadowed the builtin type.
    return [{'key': k, 'value': v} for k, v in mapping.items()]

# ("sonic-clear flowcnt-trap")
@cli.command()
def flowcnt_trap():
    """ Clear trap flow counters """
    # Delegates to the flow_counters_stat script in clear mode for trap counters.
    run_command("flow_counters_stat -c -t trap")


# Load plugins and register them
helper = util_base.UtilHelper()
for plugin in helper.load_plugins(plugins):
Expand Down
40 changes: 38 additions & 2 deletions counterpoll/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,12 @@ def disable():
# Port counter commands
@cli.group()
def port():
""" Queue counter commands """
""" Port counter commands """

@port.command()
@click.argument('poll_interval', type=click.IntRange(100, 30000))
def interval(poll_interval):
""" Set queue counter query interval """
""" Set port counter query interval """
configdb = ConfigDBConnector()
configdb.connect()
port_info = {}
Expand Down Expand Up @@ -241,6 +241,39 @@ def disable():
configdb.mod_entry("FLEX_COUNTER_TABLE", "PG_WATERMARK", fc_info)
configdb.mod_entry("FLEX_COUNTER_TABLE", BUFFER_POOL_WATERMARK, fc_info)

# Trap flow counter commands
@cli.group()
@click.pass_context
def flowcnt_trap(ctx):
    """ Trap flow counter commands """
    # Open one CONFIG_DB connection and share it with subcommands via ctx.obj.
    db = ConfigDBConnector()
    db.connect()
    ctx.obj = db

@flowcnt_trap.command()
@click.argument('poll_interval', type=click.IntRange(1000, 30000))
@click.pass_context
def interval(ctx, poll_interval):
    """ Set trap flow counter query interval """
    # ctx.obj is the ConfigDBConnector opened by the flowcnt_trap group.
    ctx.obj.mod_entry("FLEX_COUNTER_TABLE", "FLOW_CNT_TRAP",
                      {'POLL_INTERVAL': poll_interval})

@flowcnt_trap.command()
@click.pass_context
def enable(ctx):
    """ Enable trap flow counter query """
    # ctx.obj is the ConfigDBConnector opened by the flowcnt_trap group.
    ctx.obj.mod_entry("FLEX_COUNTER_TABLE", "FLOW_CNT_TRAP",
                      {'FLEX_COUNTER_STATUS': 'enable'})

@flowcnt_trap.command()
@click.pass_context
def disable(ctx):
    """ Disable trap flow counter query """
    # ctx.obj is the ConfigDBConnector opened by the flowcnt_trap group.
    ctx.obj.mod_entry("FLEX_COUNTER_TABLE", "FLOW_CNT_TRAP",
                      {'FLEX_COUNTER_STATUS': 'disable'})

@cli.command()
def show():
""" Show the counter configuration """
Expand All @@ -254,6 +287,7 @@ def show():
pg_wm_info = configdb.get_entry('FLEX_COUNTER_TABLE', 'PG_WATERMARK')
pg_drop_info = configdb.get_entry('FLEX_COUNTER_TABLE', PG_DROP)
buffer_pool_wm_info = configdb.get_entry('FLEX_COUNTER_TABLE', BUFFER_POOL_WATERMARK)
trap_info = configdb.get_entry('FLEX_COUNTER_TABLE', 'FLOW_CNT_TRAP')

header = ("Type", "Interval (in ms)", "Status")
data = []
Expand All @@ -273,6 +307,8 @@ def show():
data.append(['PG_DROP_STAT', pg_drop_info.get("POLL_INTERVAL", DEFLT_10_SEC), pg_drop_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if buffer_pool_wm_info:
data.append(["BUFFER_POOL_WATERMARK_STAT", buffer_pool_wm_info.get("POLL_INTERVAL", DEFLT_10_SEC), buffer_pool_wm_info.get("FLEX_COUNTER_STATUS", DISABLE)])
if trap_info:
data.append(["FLOW_CNT_TRAP_STAT", trap_info.get("POLL_INTERVAL", DEFLT_1_SEC), trap_info.get("FLEX_COUNTER_STATUS", DISABLE)])

click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))

Expand Down
210 changes: 210 additions & 0 deletions scripts/flow_counters_stat
Original file line number Diff line number Diff line change
@@ -0,0 +1,210 @@
#!/usr/bin/env python3

import argparse
import os
import _pickle as pickle
import sys

from natsort import natsorted
from tabulate import tabulate

# mock the redis for unit test purposes #
try:
if os.environ["UTILITIES_UNIT_TESTING"] == "2":
modules_path = os.path.join(os.path.dirname(__file__), "..")
tests_path = os.path.join(modules_path, "tests")
sys.path.insert(0, modules_path)
sys.path.insert(0, tests_path)
import mock_tables.dbconnector
if os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] == "multi_asic":
import mock_tables.mock_multi_asic
mock_tables.dbconnector.load_namespace_config()

except KeyError:
pass

import utilities_common.multi_asic as multi_asic_util
from utilities_common.netstat import format_number_with_comma, table_as_json, ns_diff, format_prate

# Flow counter meta data; a new type of flow counter can extend this dictionary to reuse existing logic.
# Each entry maps a counter type (the -t CLI argument) to its display headers and
# the COUNTERS_DB name map that resolves entry names to counter OIDs.
flow_counter_meta = {
    'trap': {
        'headers': ['Trap Name', 'Packets', 'Bytes', 'PPS'],
        'name_map': 'COUNTERS_TRAP_NAME_MAP',
    }
}
# SAI stat fields read per counter OID, in the same order as the 'Packets'/'Bytes' columns.
flow_counters_fields = ['SAI_COUNTER_STAT_PACKETS', 'SAI_COUNTER_STAT_BYTES']

# Only do diff for 'Packets' and 'Bytes' (positions 0 and 1 of a values row); PPS is a rate.
diff_column_positions = set([0, 1])

# COUNTERS_DB key prefixes and field names.
FLOW_COUNTER_TABLE_PREFIX = "COUNTERS:"
RATES_TABLE_PREFIX = 'RATES:'
PPS_FIELD = 'RX_PPS'
STATUS_NA = 'N/A'


class FlowCounterStats(object):
    """Collect, display, clear and diff flow counter statistics from COUNTERS_DB.

    "Clear" is implemented on the CLI side: the current statistics are
    snapshotted to a per-user file under /tmp, and subsequent shows subtract
    that snapshot from the live counters (see ``clear``/``_diff``).
    """

    def __init__(self, args):
        # args: parsed argparse namespace (type / namespace / json / delete / clear).
        # self.db is populated per-namespace by the multi-ASIC run decorator on
        # _collect — presumably a DB connector from utilities_common; TODO confirm.
        self.db = None
        self.multi_asic = multi_asic_util.MultiAsic(namespace_option=args.namespace)
        self.args = args
        meta_data = flow_counter_meta[args.type]
        self.name_map = meta_data['name_map']
        self.headers = meta_data['headers']
        # Per-user snapshot file, e.g. /tmp/trap-stats-1000; uid suffix keeps
        # different users' snapshots separate.
        self.data_file = os.path.join('/tmp/{}-stats-{}'.format(args.type, os.getuid()))
        if self.args.delete and os.path.exists(self.data_file):
            os.remove(self.data_file)
        # {<namespace>: {<entry_name>: [packets, bytes, pps, counter_oid]}}
        self.data = {}

    def show(self):
        """Show flow counter statistic
        """
        self._collect()
        old_data = self._load()
        # Subtract the saved snapshot (if any) so values are relative to the last clear.
        self._diff(old_data, self.data)

        table = []
        if self.multi_asic.is_multi_asic:
            # On multi ASIC platform, an extra column "ASIC ID" must be present to avoid duplicate entry name
            headers = ['ASIC ID'] + self.headers
            for ns, stats in natsorted(self.data.items()):
                for name, values in natsorted(stats.items()):
                    # values layout: [packets, bytes, pps, counter_oid]; the OID is not displayed.
                    row = [ns, name, format_number_with_comma(values[0]), format_number_with_comma(values[1]), format_prate(values[2])]
                    table.append(row)
        else:
            headers = self.headers
            for ns, stats in natsorted(self.data.items()):
                for name, values in natsorted(stats.items()):
                    row = [name, format_number_with_comma(values[0]), format_number_with_comma(values[1]), format_prate(values[2])]
                    table.append(row)
        if self.args.json:
            print(table_as_json(table, headers))
        else:
            print(tabulate(table, headers, tablefmt='simple', stralign='right'))

    def clear(self):
        """Clear flow counter statistic. This function does not clear data from ASIC. Instead, it saves flow counter statistic to a file. When user
        issue show command after clear, it does a diff between new data and saved data.
        """
        self._collect()
        self._save()
        print('Flow Counters were successfully cleared')

    @multi_asic_util.run_on_multi_asic
    def _collect(self):
        """Collect flow counter statistic from DB. This function is called on a multi ASIC context.
        """
        # The decorator invokes this once per namespace; results accumulate in self.data.
        self.data.update(self._get_stats_from_db())

    def _get_stats_from_db(self):
        """Get flow counter statistic from DB.
        Returns:
            dict: A dictionary. E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>]}}
        """
        ns = self.multi_asic.current_namespace
        # name_map: {<entry_name>: <counter_oid>} from COUNTERS_DB (e.g. COUNTERS_TRAP_NAME_MAP).
        name_map = self.db.get_all(self.db.COUNTERS_DB, self.name_map)
        data = {ns: {}}
        if not name_map:
            return data

        for name, counter_oid in name_map.items():
            values = []
            full_table_id = FLOW_COUNTER_TABLE_PREFIX + counter_oid
            for field in flow_counters_fields:
                counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, field)
                # Missing fields render as N/A rather than failing the whole show.
                values.append(STATUS_NA if counter_data is None else counter_data)

            full_table_id = RATES_TABLE_PREFIX + counter_oid
            counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, PPS_FIELD)
            values.append(STATUS_NA if counter_data is None else counter_data)
            # Trailing OID lets _diff detect when a counter was re-created (see _diff).
            values.append(counter_oid)
            data[ns][name] = values
        return data

    def _save(self):
        """Save flow counter statistic to a file
        """
        # NOTE(review): snapshot is a pickle file in /tmp with a predictable,
        # uid-scoped name; _load() trusts its contents — verify this is acceptable.
        try:
            if os.path.exists(self.data_file):
                os.remove(self.data_file)

            with open(self.data_file, 'wb') as f:
                pickle.dump(self.data, f)
        except IOError as e:
            print('Failed to save statistic - {}'.format(repr(e)))

    def _load(self):
        """Load flow counter statistic from a file
        Returns:
            dict: A dictionary. E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>]}}

            Returns None when no snapshot exists or it cannot be read.
        """
        if not os.path.exists(self.data_file):
            return None

        try:
            with open(self.data_file, 'rb') as f:
                data = pickle.load(f)
        except IOError as e:
            print('Failed to load statistic - {}'.format(repr(e)))
            return None

        return data

    def _diff(self, old_data, new_data):
        """Do a diff between new data and old data.
        Args:
            old_data (dict): E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>]}}
            new_data (dict): E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>]}}
        """
        # new_data is mutated in place; entries absent from old_data are left untouched.
        if not old_data:
            return

        for ns, stats in new_data.items():
            if ns not in old_data:
                continue
            old_stats = old_data[ns]
            for name, values in stats.items():
                if name not in old_stats:
                    continue

                old_values = old_stats[name]
                if values[-1] != old_values[-1]:
                    # Counter OID not equal means the trap was removed and added again. Removing a trap would cause
                    # the stats value restart from 0. To avoid get minus value here, it should not do diff in case
                    # counter OID is changed.
                    continue

                # Only packets/bytes (positions in diff_column_positions) are diffed; PPS is a live rate.
                for i in diff_column_positions:
                    values[i] = ns_diff(values[i], old_values[i])


def main():
    """Entry point: parse command-line arguments, then show or clear flow counters."""
    epilog_text = """
Examples:
flow_counters_stat -c -t trap
flow_counters_stat -t trap
flow_counters_stat -d -t trap
"""
    parser = argparse.ArgumentParser(
        description='Display the flow counters',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=epilog_text)
    parser.add_argument('-c', '--clear', action='store_true', help='Copy & clear stats')
    parser.add_argument('-d', '--delete', action='store_true', help='Delete saved stats')
    parser.add_argument('-j', '--json', action='store_true', help='Display in JSON format')
    parser.add_argument('-n', '--namespace', default=None, help='Display flow counters for specific namespace')
    parser.add_argument('-t', '--type', required=True, choices=['trap'], help='Flow counters type')

    args = parser.parse_args()

    stats = FlowCounterStats(args)
    # -c snapshots and "clears"; any other invocation shows (possibly diffed) stats.
    stats.clear() if args.clear else stats.show()


if __name__ == '__main__':
    main()
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@
'scripts/fast-reboot-dump.py',
'scripts/fdbclear',
'scripts/fdbshow',
'scripts/flow_counters_stat',
'scripts/gearboxutil',
'scripts/generate_dump',
'scripts/generate_shutdown_order.py',
Expand Down
22 changes: 22 additions & 0 deletions show/flow_counters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import click
import utilities_common.cli as clicommon
import utilities_common.multi_asic as multi_asic_util

#
# 'flowcnt-trap' group ###
#

@click.group(cls=clicommon.AliasedGroup)
def flowcnt_trap():
    """Show trap flow counter related information"""
    # Group container only; the work happens in its subcommands (e.g. `stats`).
    pass

@flowcnt_trap.command()
@click.option('--verbose', is_flag=True, help="Enable verbose output")
@click.option('--namespace', '-n', 'namespace', default=None, type=click.Choice(multi_asic_util.multi_asic_ns_choices()), show_default=True, help='Namespace name or all')
def stats(verbose, namespace):
    """Show trap flow counter statistic"""
    # Delegate to the flow_counters_stat script, forwarding the namespace when given.
    cmd_parts = ["flow_counters_stat -t trap"]
    if namespace is not None:
        cmd_parts.append("-n {}".format(namespace))
    clicommon.run_command(" ".join(cmd_parts), display_cmd=verbose)
2 changes: 2 additions & 0 deletions show/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
from . import dropcounters
from . import feature
from . import fgnhg
from . import flow_counters
from . import gearbox
from . import interfaces
from . import kdump
Expand Down Expand Up @@ -179,6 +180,7 @@ def cli(ctx):
cli.add_command(dropcounters.dropcounters)
cli.add_command(feature.feature)
cli.add_command(fgnhg.fgnhg)
cli.add_command(flow_counters.flowcnt_trap)
cli.add_command(kdump.kdump)
cli.add_command(interfaces.interfaces)
cli.add_command(kdump.kdump)
Expand Down
3 changes: 3 additions & 0 deletions tests/counterpoll_input/config_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -784,6 +784,9 @@
},
"PORT": {
"FLEX_COUNTER_STATUS": "enable"
},
"FLOW_CNT_TRAP": {
"FLEX_COUNTER_STATUS": "enable"
}
},
"PORT": {
Expand Down
Loading

0 comments on commit a35423e

Please sign in to comment.