Skip to content

Commit

Permalink
feat: privacy_mode
Browse files Browse the repository at this point in the history
  • Loading branch information
k11kirky committed Jan 14, 2025
1 parent 190c628 commit 996cce3
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 15 deletions.
4 changes: 3 additions & 1 deletion llm_observability_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,12 @@
posthog.personal_api_key = os.getenv("POSTHOG_PERSONAL_API_KEY", "your-personal-api-key")
posthog.host = os.getenv("POSTHOG_HOST", "http://localhost:8000") # Or https://app.posthog.com
posthog.debug = True
# change this to False to capture prompts and outputs in events (usage + metadata are captured either way)
posthog.privacy_mode = True

openai_client = OpenAI(
api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
posthog_client=posthog,
posthog_client=posthog
)

async_openai_client = AsyncOpenAI(
Expand Down
2 changes: 2 additions & 0 deletions posthog/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@
exception_autocapture_integrations = [] # type: List[Integrations]
# Used to determine in app paths for exception autocapture. Defaults to the current working directory
project_root = None # type: Optional[str]
# Used by the AI observability feature: when enabled, prompts and outputs are not captured — only usage + metadata
privacy_mode = False # type: bool

default_client = None # type: Optional[Client]

Expand Down
10 changes: 5 additions & 5 deletions posthog/ai/openai/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
except ImportError:
raise ModuleNotFoundError("Please install the OpenAI SDK to use this feature: 'pip install openai'")

from posthog.ai.utils import call_llm_and_track_usage, get_model_params
from posthog.ai.utils import call_llm_and_track_usage, get_model_params, with_privacy_mode
from posthog.client import Client as PostHogClient


Expand Down Expand Up @@ -142,15 +142,15 @@ def _capture_streaming_event(
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": kwargs.get("messages"),
"$ai_output": {
"$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("messages")),
"$ai_output": with_privacy_mode(self._client._ph_client, {
"choices": [
{
"content": output,
"role": "assistant",
}
]
},
}),
"$ai_http_status": 200,
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
"$ai_output_tokens": usage_stats.get("completion_tokens", 0),
Expand Down Expand Up @@ -214,7 +214,7 @@ def create(
event_properties = {
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
"$ai_input": kwargs.get("input"),
"$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("input")),
"$ai_http_status": 200,
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
"$ai_latency": latency,
Expand Down
10 changes: 5 additions & 5 deletions posthog/ai/openai/openai_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
except ImportError:
raise ModuleNotFoundError("Please install the OpenAI SDK to use this feature: 'pip install openai'")

from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params
from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, with_privacy_mode
from posthog.client import Client as PostHogClient


Expand Down Expand Up @@ -141,15 +141,15 @@ def _capture_streaming_event(
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": kwargs.get("messages"),
"$ai_output": {
"$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("messages")),
"$ai_output": with_privacy_mode(self._client._ph_client, {
"choices": [
{
"content": output,
"role": "assistant",
}
]
},
}),
"$ai_http_status": 200,
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
"$ai_output_tokens": usage_stats.get("completion_tokens", 0),
Expand Down Expand Up @@ -213,7 +213,7 @@ async def create(
event_properties = {
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
"$ai_input": kwargs.get("input"),
"$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("input")),
"$ai_http_status": 200,
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
"$ai_latency": latency,
Expand Down
13 changes: 9 additions & 4 deletions posthog/ai/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ def call_llm_and_track_usage(
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": kwargs.get("messages"),
"$ai_output": format_response(response),
"$ai_input": with_privacy_mode(ph_client, kwargs.get("messages")),
"$ai_output": with_privacy_mode(ph_client, format_response(response)),
"$ai_http_status": http_status,
"$ai_input_tokens": input_tokens,
"$ai_output_tokens": output_tokens,
Expand Down Expand Up @@ -150,8 +150,8 @@ async def call_llm_and_track_usage_async(
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": kwargs.get("messages"),
"$ai_output": format_response(response),
"$ai_input": with_privacy_mode(ph_client, kwargs.get("messages")),
"$ai_output": with_privacy_mode(ph_client, format_response(response)),
"$ai_http_status": http_status,
"$ai_input_tokens": input_tokens,
"$ai_output_tokens": output_tokens,
Expand All @@ -176,3 +176,8 @@ async def call_llm_and_track_usage_async(
raise error

return response

def with_privacy_mode(ph_client: "PostHogClient", value: Any) -> Optional[Any]:
    """Return *value* unless the client has privacy mode enabled.

    Used by the AI observability integration to strip potentially sensitive
    payloads (``$ai_input`` / ``$ai_output``) from captured events while
    leaving usage and metadata properties intact.

    Args:
        ph_client: PostHog client whose ``privacy_mode`` flag is consulted.
        value: The potentially sensitive payload (messages, model output, ...).

    Returns:
        ``None`` when ``ph_client.privacy_mode`` is truthy, otherwise *value*
        unchanged.
    """
    if ph_client.privacy_mode:
        return None
    return value

0 comments on commit 996cce3

Please sign in to comment.