Skip to content

Commit

Permalink
wip 2.0
Browse files Browse the repository at this point in the history
  • Loading branch information
RootLUG committed Aug 18, 2024
1 parent db1dd08 commit 34ab6ab
Show file tree
Hide file tree
Showing 23 changed files with 909 additions and 33 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,13 @@
Loccer is a **zero-dependency** library for creating error logs on a local system. It is designed to be used in air-gapped networks and highly secure environments where alternatives like Sentry are not viable. Logs are stored by default as one JSON-formatted object per line.


Why choose loccer?
------------------

- **Zero external dependencies**, pure Python. We take supply-chain security very seriously and only use the built-in Python modules.
- **Full data control**. You know exactly what data is produced and where. This is a very important factor for highly secure systems, where you need to specify exactly which folder will contain the produced data. You also have full control over what data is captured, by extending it or stripping it down.
- **Easy to start**. No need to set up any servers that need to be maintained. You only need 2 lines of code to get started with the default configuration.

Usage
-----

Expand Down
52 changes: 52 additions & 0 deletions examples/metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import sys
import pprint
import time
import logging

import loccer
from loccer.outputs.file_stream import JSONFileOutput


# Use getLogger(): the logging docs say Logger must never be instantiated
# directly -- a directly-constructed Logger is not registered with the logging
# manager, so it is invisible to logging.getLogger() elsewhere.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

# Install loccer globally: forward this logger's records into loccer and
# write every captured event as JSON lines into `metrics_logs.json`.
loccer.install(
    enable_logging=logger,
    output_handlers=(JSONFileOutput(
        filename="metrics_logs.json",
    ),),
)


@loccer.span("decompose")
def factorize(value: int) -> list[int]:
    """Return the factorization of *value* into prime factors.

    The smallest divisor >= 2 is found and the quotient is factorized
    recursively; for ``value < 2`` an empty list is returned.  Each step is
    recorded on the current loccer span and artificially slowed down so the
    produced trace is easy to inspect.
    """
    loccer.span.set("value", value)

    # Smallest non-trivial divisor, or None when `value` is prime (or < 2).
    divisor = next((d for d in range(2, value) if value % d == 0), None)

    if divisor is None:
        result: list[int] = []
        if value > 1:
            logger.debug("No factors for %d found", value)
            result.append(value)
        return result

    quotient = value // divisor
    logger.info("Found a factor of %d / %d = %d ", value, divisor, quotient)
    loccer.span.set("divisor", divisor)
    time.sleep(0.3)  # Add artificial delay
    return [divisor, *factorize(quotient)]


if __name__ == "__main__":
    with loccer.trace("metrics_demo") as tr:
        # Numbers from the command line, or a composite demo value by default.
        numbers = [int(arg) for arg in sys.argv[1:]] or [7919 * 3 * 7 * 7907 * 7901 * 42]

        for number in numbers:
            result = factorize(number)
            print(f"Factors for `{number}` are: {result}")

    pprint.pprint(tr.snapshot())
9 changes: 7 additions & 2 deletions examples/quart_app.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
import asyncio
import logging

import quart

import loccer
from loccer.integrations.platform_context import PlatformIntegration
from loccer.integrations.quart_context import QuartContextIntegration
from loccer.integrations.asyncio_context import AsyncioContextIntegration
from loccer.outputs.file_stream import JSONFileOutput


logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
app = quart.Quart(__name__)

asyncio_ctx = AsyncioContextIntegration()
Expand Down Expand Up @@ -41,5 +43,8 @@ async def index_error():
loop = asyncio.get_event_loop()
# Some integrations may require additional activation outside the `loccer.install` call
loop.set_exception_handler(asyncio_ctx.loop_exception_handler)
app.run(debug=True, loop=loop, port=8080)
port = 8080
debug = True
logger.info("Starting the application", extra={"port": port, "debug": debug})
app.run(debug=debug, loop=loop, port=port)
# now go and open the page at http://localhost:8080/ to generate an error report
46 changes: 45 additions & 1 deletion loccer/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import logging
import os
import sys
import traceback as tb_module
Expand All @@ -8,6 +9,7 @@
from unittest.mock import patch

from . import bases
from . import tracing
from .outputs.misc import NullOutput
from .outputs.stderr import StderrOutput
from .integrations.platform_context import PlatformIntegration
Expand All @@ -20,6 +22,7 @@
PlatformIntegration(),
PackagesIntegration(),
)
DEFAULT_TRACE_FILE = "loccer_traces.tar"


class HybridContext:
Expand Down Expand Up @@ -69,6 +72,12 @@ def _call(self) -> None:
if exc_type and exc_val:
self.exc_handler(exc_type, exc_val, exc_tb)

    def trace(self, label: str) -> tracing.Trace:
        """Start a new trace named *label*; this context is notified via
        ``trace_finished_callback`` when the trace completes."""
        return tracing.Trace(label, finished_cb=self.trace_finished_callback)

    def trace_finished_callback(self, trace: tracing.Trace) -> None:
        """Hook invoked when a trace created by :meth:`trace` finishes.

        Intentionally a no-op here; Loccer provides its own implementation
        that persists the trace.
        """
        pass


T_loccer_exchook = t.Callable[
[T_exc_type, T_exc_val, T_exc_tb, t.Optional[HybridContext], t.Optional[T_exc_hook]], None # pragma: no mutate
Expand All @@ -81,6 +90,7 @@ def __init__(
output_handlers: t.Sequence[bases.OutputBase] = DEFAULT_OUTPUT,
integrations: t.Sequence[bases.Integration] = DEFAULT_INTEGRATIONS,
exc_hook: t.Optional[T_loccer_exchook] = None,
trace_file: t.Optional[str] = DEFAULT_TRACE_FILE,
**kwargs: t.Any,
):
super().__init__(**kwargs)
Expand All @@ -94,6 +104,7 @@ def __init__(
self.output_handlers = output_handlers
self.integrations = integrations
self.session = bases.Session(self)
self.trace_file = trace_file

for x in integrations:
x.activate(self)
Expand Down Expand Up @@ -125,10 +136,22 @@ def emit_output(self, output: bases.LoccerOutput) -> None:

out_handler.output(output, lc=self)

if (tracer:=tracing.get_current()) is not None:
tracer.children.append(tracing.Event.from_loccer_output(output))

def log_metadata(self, data: JSONType) -> None:
log = bases.MetadataLog(data)
log = bases.MetadataLog("Metadata log", extra=data)
self.emit_output(log)

def trace_finished_callback(self, trace: tracing.Trace) -> None:
if self.trace_file is None:
return

# FIXME
from .outputs.tar import Tar
t = Tar(self.trace_file)
t.log_trace(trace)


capture_exception = HybridContext() # pragma: no mutate

Expand Down Expand Up @@ -170,6 +193,8 @@ def install(
preserve_previous: bool = True,
output_handlers: t.Sequence[bases.OutputBase] = DEFAULT_OUTPUT,
integrations: t.Sequence[bases.Integration] = DEFAULT_INTEGRATIONS,
trace_file: t.Optional[str] = DEFAULT_TRACE_FILE,
enable_logging: bool|logging.Logger = True
) -> Loccer:
"""
Installs loccer as a global exception handler and activates all it's integrations
Expand All @@ -181,7 +206,19 @@ def install(
"""
global capture_exception

lc_logger = None

if enable_logging is True:
lc_logger = logging.getLogger(None)
elif isinstance(enable_logging, logging.Logger):
lc_logger = enable_logging

lc = Loccer(output_handlers=output_handlers, integrations=integrations)

if lc_logger is not None:
log_handler = bases.LogHandler(lc)
lc_logger.addHandler(log_handler)

previous_hook: t.Optional[T_exc_hook]

if preserve_previous:
Expand All @@ -205,3 +242,10 @@ def restore() -> None:

capture_exception = HybridContext()
sys.excepthook = sys.__excepthook__



# Public alias: `loccer.span` is used both as a decorator and as an ambient
# accessor (e.g. `loccer.span.set(...)` inside a traced function).
span = tracing.HybridSpanContext

def trace(label: str) -> tracing.Trace:
    """Start a new trace named *label* on the globally installed hybrid context."""
    return get_hybrid_context().trace(label)
61 changes: 56 additions & 5 deletions loccer/bases.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,22 @@
import abc
from abc import ABCMeta, abstractmethod
import datetime
import logging
import os
import traceback
import typing as t
import uuid
from logging.handlers import QueueHandler

from .ltypes import T_exc_type, T_exc_val, T_exc_tb, JSONType

if t.TYPE_CHECKING:
from . import Loccer
from .tracing import Trace


DEFAULT_MAX_LOG_SIZE = ((2**20) * 10)
DEFAULT_MAX_LOGS = 10


class T_Frame(t.TypedDict):
Expand All @@ -24,6 +31,7 @@ class T_Frame(t.TypedDict):

class LoccerOutput(metaclass=abc.ABCMeta):
    def __init__(self) -> None:
        """Initialize the common fields shared by every loccer output entry."""
        # Unique identifier for correlating this entry across handlers/outputs.
        self.id = uuid.uuid4()
        # Creation timestamp, timezone-aware UTC.
        self.ts = datetime.datetime.now(datetime.timezone.utc)
        # Extra data attached by active integrations -- presumably keyed by
        # integration name; confirm against Integration implementations.
        self.integrations_data: dict[str, JSONType] = {}

Expand All @@ -47,6 +55,7 @@ def __init__(
def as_json(self) -> dict[str, JSONType]:
data: dict[str, JSONType] = {
"loccer_type": "exception",
"id": self.id.hex,
"timestamp": self.ts.isoformat(),
"exc_type": self.exc_type.__name__,
"msg": str(self),
Expand All @@ -71,23 +80,64 @@ def as_json(self) -> dict[str, JSONType]:


class MetadataLog(LoccerOutput):
    """A structured log entry (message + optional metadata) emitted via loccer.

    Note: this reconstruction removes the stale pre-refactor lines that the
    diff left interleaved here (the old single-argument ``__init__`` and the
    old ``as_json`` dict opening), which made the class body invalid.
    """

    def __init__(
        self,
        msg: str,
        *,
        extra: JSONType = None,
        level: t.Optional[str] = None,
        logger_name: t.Optional[str] = None,
        msg_pattern: t.Optional[str] = None,
    ) -> None:
        """Create a log entry.

        :param msg: fully formatted log message
        :param extra: arbitrary JSON-serializable metadata attached to the entry
        :param level: log level name (e.g. ``"INFO"``), if originating from logging
        :param logger_name: name of the originating logger, if any
        :param msg_pattern: unformatted message pattern (``record.msg``), if any
        """
        super().__init__()
        self.msg = msg
        self.msg_pattern = msg_pattern
        self.extra = extra
        self.level = level
        self.logger_name = logger_name

    @classmethod
    def from_log_record(cls, record: logging.LogRecord) -> MetadataLog:
        """Build a MetadataLog from a stdlib ``logging.LogRecord``.

        The extra payload is read from ``record.context`` (empty dict when the
        attribute is absent) -- NOTE(review): this is a custom attribute, not
        the stdlib ``extra=`` mechanism; confirm against callers.
        """
        return cls(
            msg=record.getMessage(),
            extra=getattr(record, "context", {}),
            level=record.levelname,
            logger_name=record.name,
            msg_pattern=record.msg,
        )

    def as_json(self) -> dict[str, JSONType]:
        """Serialize the entry; optional fields are included only when set."""
        data: dict[str, JSONType] = {
            "loccer_type": "log",
            "id": self.id.hex,
            "timestamp": self.ts.isoformat(),
            "msg": self.msg,
            "extra": self.extra,
            "integrations": self.integrations_data,
        }

        if self.logger_name is not None:
            data["name"] = self.logger_name

        if self.level is not None:
            data["level"] = self.level

        if self.msg_pattern is not None:
            data["msg_pattern"] = self.msg_pattern

        return data


class LogHandler(logging.StreamHandler):
    """Bridge from the stdlib logging machinery into loccer.

    Every record emitted through a logger carrying this handler is converted
    into a MetadataLog and routed through the given Loccer instance.
    NOTE(review): inherits StreamHandler although the stream itself is unused
    by :meth:`emit` -- confirm whether plain ``logging.Handler`` would suffice.
    """

    def __init__(self, lc: Loccer) -> None:
        """Remember the Loccer instance that will receive converted records."""
        super().__init__()
        self.lc = lc

    def emit(self, record: logging.LogRecord) -> None:
        """Convert *record* into a MetadataLog and emit it through loccer."""
        self.lc.emit_output(MetadataLog.from_log_record(record))


class OutputBase(metaclass=ABCMeta):
    """Abstract base class for loccer output handlers (event destinations)."""

    @abstractmethod  # pragma: no mutate
    def output(self, exc: LoccerOutput, lc: Loccer) -> None:
        """Write a single loccer output entry; implemented by each format."""
        ...

    def log_trace(self, trace: Trace) -> None:
        # Trace persistence is opt-in: only output formats that can store
        # traces override this method.
        raise RuntimeError("Trace output is not supported for this output format")


class Session(LoccerOutput):
def __init__(self, lc: Loccer) -> None:
Expand Down Expand Up @@ -119,6 +169,7 @@ def as_json(self) -> dict[str, JSONType]:
return {
"loccer_type": "session",
"session_id": self.session_id,
"timestamp": self.ts.isoformat(),
"data": self.session_data,
}

Expand Down
Loading

0 comments on commit 34ab6ab

Please sign in to comment.