feat(server): Store checker list as analysis-info

This patch lets users query whether a checker was enabled or disabled
during the analysis, irrespective of whether that checker produced any
reports (which might have been deleted from the server since!). This
improves auditing capabilities, because definite knowledge of which
checkers were _available_ is kept in the database and is not lost once
the analysis temporaries are cleaned up on the analysing client.

Features:

 - Create a new table, `checkers`, to store a unique ID (per product
   database) for each checker's name.
 - Add information about checkers and their enabledness to the database,
   based on the `metadata.json`, if available.
 - Extend the `AnalysisInfo` API object to report the collected
   information to the client (see the client-side sketch right after
   this list).
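
A minimal client-side sketch of how the extended object could be consumed
(the stub import path, the `client` handle and the `runId` filter field are
assumptions for illustration only; this patch contributes the `checkers`
map and `AnalysisInfoChecker.enabled`, while `getAnalysisInfo()` already
existed):

    # Hypothetical consumer of the extended AnalysisInfo object.
    from codechecker_api.codeCheckerDBAccess_v6.ttypes import \
        AnalysisInfoFilter

    def print_checker_statuses(client, run_id: int):
        # 'client' is assumed to be an authenticated codeCheckerDBAccess
        # Thrift client handle.
        info_filter = AnalysisInfoFilter(runId=run_id)
        for info in client.getAnalysisInfo(info_filter, 500, 0):
            # 'checkers' is optional: analyses stored before this patch
            # will not have it filled.
            for analyzer, checks in (info.checkers or {}).items():
                for name, checker in checks.items():
                    state = "enabled" if checker.enabled else "disabled"
                    print(f"{analyzer}/{name}: {state}")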

Refactoring:

 - Normalise the use of the `checkers` table by lifting additional
   checker-unique information (`severity`) out of `reports`, leaving only
   a `FOREIGN KEY` in the `reports` table (a rough schema sketch follows
   after this list).
 - Add facilities for explicitly annotating `zlib`-compressed strings in
   the database.
 - Ensure that all versions of `metadata.json` are represented the same
   way in memory once `MetadataInfoParser` has succeeded.
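
A rough sketch of the normalisation described above (a hypothetical
SQLAlchemy model; the table and column names here are illustrative and do
not claim to match the actual migration):

    # Hypothetical sketch of the normalised schema; not the real model code.
    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Checker(Base):
        __tablename__ = 'checkers'
        id = Column(Integer, primary_key=True)  # Unique per product database.
        analyzer_name = Column(String)          # E.g. "clangsa".
        checker_name = Column(String)           # E.g. "core.DivideZero".
        severity = Column(Integer)              # Lifted out of 'reports'.

    class Report(Base):
        __tablename__ = 'reports'
        id = Column(Integer, primary_key=True)
        # Checker-unique data is no longer duplicated per report row; only
        # the foreign key remains.
        checker_id = Column(Integer, ForeignKey('checkers.id'))
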
whisperity committed Jan 17, 2024
1 parent e7b5c6f commit 0fe1134
Showing 41 changed files with 1,783 additions and 493 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/test.yml
@@ -155,6 +155,7 @@ jobs:
run: sh .github/workflows/install-deps.sh

- name: Init .pgpass
if: matrix.database != 'sqlite'
run: |
echo '*:*:*:*:postgres' > $HOME/.pgpass
chmod 0600 $HOME/.pgpass
@@ -163,7 +164,10 @@
env:
PGPASSWORD: postgres
run: |
export PGPASSFILE=$HOME/.pgpass
if [[ "${{ matrix.database != 'sqlite' }}" == "true" ]]
then
export PGPASSFILE=$HOME/.pgpass
fi
make pip_dev_deps
pip3 install -r web/requirements_py/auth/requirements.txt
35 changes: 0 additions & 35 deletions alembic.ini
@@ -61,38 +61,3 @@ script_location = web/server/codechecker_server/migrations/report

sqlalchemy.url = postgres://postgres@localhost:5432/default
#sqlalchemy.url = sqlite:////home/username/.codechecker/Default.sqlite

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
7 changes: 7 additions & 0 deletions analyzer/tests/unit/test_checker_labels.py
@@ -115,6 +115,13 @@ def initialize_labels_dir(self):
def test_checker_labels(self):
cl = CheckerLabels(self.labels_dir.name)

self.assertEqual(
sorted(cl.get_analyzers()),
sorted([
"clang-tidy",
"clangsa"
]))

self.assertEqual(
sorted(cl.checkers_by_labels([
'profile:extreme'])),
13 changes: 11 additions & 2 deletions codechecker_common/checker_labels.py
@@ -1,6 +1,12 @@
import os
# -------------------------------------------------------------------------
#
# Part of the CodeChecker project, under the Apache License v2.0 with
# LLVM Exceptions. See LICENSE for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
from collections import defaultdict

import os
from typing import Any, cast, DefaultDict, Dict, Iterable, List, Optional, \
Set, Tuple, Union

@@ -148,6 +154,9 @@ def __get_analyzer_data(
if analyzer is None or a == analyzer:
yield a, c

def get_analyzers(self) -> Iterable[str]:
return self.__data.keys()

def checkers_by_labels(
self,
filter_labels: Iterable[str],
36 changes: 33 additions & 3 deletions codechecker_common/util.py
@@ -8,12 +8,12 @@
"""
Util module.
"""


import itertools
import json
from typing import TextIO
from math import ceil
from typing import Callable, TextIO
import os

import portalocker

from codechecker_common.logger import get_logger
@@ -45,6 +45,36 @@ def chunks(iterator, n):
yield itertools.chain([first], rest_of_chunk)


def progress(g, count: int, n: int,
callback: Callable[[int, float], None]):
"""
Wraps a generator of a known total length and fires 'callback' after having
yielded every (T/N)th element. The 'callback' is given the index of the
element handled just before firing it, and the percentage of progress.
"""
# E.g., if count == 100 and n == 5, then becomes [100, 95, ..., 10, 5, 0].
try:
checkpoints = [count] + list(reversed(
[list(chk)[0]
for chk in chunks(
range(0, count + 1),
int(ceil(count / n))
)]))
if checkpoints[-1] == 0:
checkpoints.pop()
except ValueError:
# The range is too small to have (count / n) many slices.
checkpoints = [count]

i = 0
for e in g:
i = i + 1
yield e
if i == checkpoints[-1]:
callback(i, float(i) / count * 100)
checkpoints.pop()
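
# Illustrative usage sketch (hypothetical, not part of this patch): with
# count=10 and n=5 the callback fires after the 2nd, 4th, 6th, 8th and 10th
# yielded element, i.e. at every 20% of progress.
def _example_progress_usage():
    def _report(idx: int, pct: float) -> None:
        print(f"Processed {idx} items ({pct:.0f}%)")

    for item in progress((x * x for x in range(10)), 10, 5, _report):
        pass  # ... handle 'item' here ...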


def load_json(path: str, default=None, lock=False, display_warning=True):
"""
Load the contents of the given file as JSON and return its value,
@@ -338,8 +338,8 @@ def __init__(
self.severity = severity
self.report_hash = report_hash
self.analyzer_name = analyzer_name
self.category = category
self.type = type
self.category = category # TODO: Remove this. DEPRECATED.
self.type = type # TODO: Remove this. DEPRECATED.
self.annotations = annotations

self.static_message = \
@@ -488,7 +488,13 @@ def to_json(self) -> Dict:
"severity": self.severity,
"report_hash": self.report_hash,
"analyzer_name": self.analyzer_name,
# DEPRECATED: 'category' is deprecated in 6.24.0, as it is not
# parsed, understood, or handled by the report server.
# It should be removed!
"category": self.category,
# DEPRECATED: 'type' is deprecated in 6.24.0, as it is not
# parsed, understood, or handled by the report server.
# It should be removed!
"type": self.type,
"review_status": self.review_status.status
if self.review_status else '',
2 changes: 1 addition & 1 deletion web/api/js/codechecker-api-node/package.json
@@ -1,6 +1,6 @@
{
"name": "codechecker-api",
"version": "6.54.0",
"version": "6.55.0",
"description": "Generated node.js compatible API stubs for CodeChecker server.",
"main": "lib",
"homepage": "https://github.com/Ericsson/codechecker",
Binary file modified web/api/py/codechecker_api/dist/codechecker_api.tar.gz
2 changes: 1 addition & 1 deletion web/api/py/codechecker_api/setup.py
@@ -8,7 +8,7 @@
with open('README.md', encoding='utf-8', errors="ignore") as f:
long_description = f.read()

api_version = '6.54.0'
api_version = '6.55.0'

setup(
name='codechecker_api',
2 changes: 1 addition & 1 deletion web/api/py/codechecker_api_shared/setup.py
@@ -8,7 +8,7 @@
with open('README.md', encoding='utf-8', errors="ignore") as f:
long_description = f.read()

api_version = '6.54.0'
api_version = '6.55.0'

setup(
name='codechecker_api_shared',
17 changes: 13 additions & 4 deletions web/api/report_server.thrift
@@ -198,7 +198,8 @@ struct RunHistoryData {
4: string user, // User name who analysed the run.
5: string time, // Date time when the run was analysed.
6: i64 id, // Id of the run history tag.
7: string checkCommand, // Check command. !!!DEPRECATED!!! This field will be empty so use the getCheckCommand API function to get the check command for a run.
// !!!DEPRECATED!!! This field will be empty so use the getCheckCommand() API function to get the check command for a run.
7: string checkCommand,
8: string codeCheckerVersion, // CodeChecker client version of the latest analysis.
9: AnalyzerStatisticsData analyzerStatistics, // Statistics for analyzers. Only number of failed and successfully analyzed
// files field will be set. To get full analyzer statistics please use the
@@ -467,8 +468,16 @@ union AnalysisInfoFilter {
3: i64 reportId,
}

struct AnalysisInfoChecker {
1: optional bool enabled, // If the checker was enabled during the analysis.
}

struct AnalysisInfo {
1: string analyzerCommand,
// For each analyzer, the checkers and their status as was available during
// the analysis.
2: optional map<string, map<string,
AnalysisInfoChecker>> checkers,
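// Illustrative example (hypothetical values, not part of the interface
// definition): for an analysis that ran Clang SA with core.DivideZero
// switched on, a client would observe:
//   checkers["clangsa"]["core.DivideZero"].enabled == true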
}

typedef string CommitHash
@@ -534,12 +543,12 @@ service codeCheckerDBAccess {

// Get check command for a run.
// PERMISSION: PRODUCT_VIEW
// !DEPRECATED Use getAnalysisInfo API to get the check commands.
// !DEPRECATED Use getAnalysisInfo() API to get the check commands.
string getCheckCommand(1: i64 runHistoryId,
2: i64 runId)
throws (1: codechecker_api_shared.RequestFailed requestError),

// Get analyzer commands based on the given filters.
// Get analyzer execution information based on the given filters.
// PERMISSION: PRODUCT_VIEW
list<AnalysisInfo> getAnalysisInfo(1: AnalysisInfoFilter analysisInfoFilter,
2: i64 limit,
2 changes: 1 addition & 1 deletion web/codechecker_web/shared/version.py
@@ -18,7 +18,7 @@
# The newest supported minor version (value) for each supported major version
# (key) in this particular build.
SUPPORTED_VERSIONS = {
6: 54
6: 55
}

# Used by the client to automatically identify the latest major and minor