Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update and improve documentation #617

Merged
merged 2 commits into from
Jan 21, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 12 additions & 12 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,15 @@ Find below a working example for submitting an event to your Event Stream:
from datadog import initialize, api

options = {
'api_key': '<YOUR_API_KEY>',
'app_key': '<YOUR_APP_KEY>'
"api_key": "<YOUR_API_KEY>",
"app_key": "<YOUR_APP_KEY>",
}

initialize(**options)

title = "Something big happened!"
text = 'And let me tell you all about it here!'
tags = ['version:1', 'application:web']
text = "And let me tell you all about it here!"
tags = ["version:1", "application:web"]

api.Event.create(title=title, text=text, tags=tags)
```
Expand All @@ -63,8 +63,8 @@ from datadog import initialize, api
initialize()

title = "Something big happened!"
text = 'And let me tell you all about it here!'
tags = ['version:1', 'application:web']
text = "And let me tell you all about it here!"
tags = ["version:1", "application:web"]

api.Event.create(title=title, text=text, tags=tags)
```
Expand All @@ -83,8 +83,8 @@ Once the Datadog Python Library is installed, instantiate the StatsD client usin
from datadog import initialize, statsd

options = {
'statsd_host':'127.0.0.1',
'statsd_port':8125
"statsd_host": "127.0.0.1",
"statsd_port": 8125,
}

initialize(**options)
Expand All @@ -100,7 +100,7 @@ Once the Datadog Python Library is installed, instantiate the StatsD client usin
from datadog import initialize, statsd

options = {
'statsd_socket_path' : PATH_TO_SOCKET
"statsd_socket_path": PATH_TO_SOCKET,
}

initialize(**options)
Expand Down Expand Up @@ -161,9 +161,9 @@ size should be used, you can set it with the parameter `max_buffer_len`. Example
from datadog import initialize

options = {
'api_key': '<YOUR_API_KEY>',
'app_key': '<YOUR_APP_KEY>',
'max_buffer_len': 4096
"api_key": "<YOUR_API_KEY>",
"app_key": "<YOUR_APP_KEY>",
"max_buffer_len": 4096,
}

initialize(**options)
Expand Down
52 changes: 26 additions & 26 deletions datadog/dogstatsd/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,10 +280,10 @@ def resolve_host(host, use_default_route):
"""
Resolve the DogStatsd host.

Args:
host (string): host
use_default_route (bool): use the system default route as host
(overrides the `host` parameter)
:param host: host
:type host: string
:param use_default_route: use the system default route as host (overrides the `host` parameter)
:type use_default_route: bool
"""
if not use_default_route:
return host
Expand Down Expand Up @@ -336,8 +336,8 @@ def open_buffer(self, max_buffer_size=None):
You can also use this as a context manager.

>>> with DogStatsd() as batch:
>>> batch.gauge('users.online', 123)
>>> batch.gauge('active.connections', 1001)
>>> batch.gauge("users.online", 123)
>>> batch.gauge("active.connections", 1001)
"""
if max_buffer_size is not None:
log.warning("The parameter max_buffer_size is now deprecated and is not used anymore")
Expand Down Expand Up @@ -366,8 +366,8 @@ def gauge(
Record the value of a gauge, optionally setting a list of tags and a
sample rate.

>>> statsd.gauge('users.online', 123)
>>> statsd.gauge('active.connections', 1001, tags=["protocol:http"])
>>> statsd.gauge("users.online", 123)
>>> statsd.gauge("active.connections", 1001, tags=["protocol:http"])
"""
return self._report(metric, 'g', value, tags, sample_rate)

Expand All @@ -382,8 +382,8 @@ def increment(
Increment a counter, optionally setting a value, tags and a sample
rate.

>>> statsd.increment('page.views')
>>> statsd.increment('files.transferred', 124)
>>> statsd.increment("page.views")
>>> statsd.increment("files.transferred", 124)
"""
self._report(metric, 'c', value, tags, sample_rate)

Expand All @@ -392,8 +392,8 @@ def decrement(self, metric, value=1, tags=None, sample_rate=None):
Decrement a counter, optionally setting a value, tags and a sample
rate.

>>> statsd.decrement('files.remaining')
>>> statsd.decrement('active.connections', 2)
>>> statsd.decrement("files.remaining")
>>> statsd.decrement("active.connections", 2)
"""
metric_value = -value if value else value
self._report(metric, 'c', metric_value, tags, sample_rate)
Expand All @@ -402,17 +402,17 @@ def histogram(self, metric, value, tags=None, sample_rate=None):
"""
Sample a histogram value, optionally setting tags and a sample rate.

>>> statsd.histogram('uploaded.file.size', 1445)
>>> statsd.histogram('album.photo.count', 26, tags=["gender:female"])
>>> statsd.histogram("uploaded.file.size", 1445)
>>> statsd.histogram("album.photo.count", 26, tags=["gender:female"])
"""
self._report(metric, 'h', value, tags, sample_rate)

def distribution(self, metric, value, tags=None, sample_rate=None):
"""
Send a global distribution value, optionally setting tags and a sample rate.

>>> statsd.distribution('uploaded.file.size', 1445)
>>> statsd.distribution('album.photo.count', 26, tags=["gender:female"])
>>> statsd.distribution("uploaded.file.size", 1445)
>>> statsd.distribution("album.photo.count", 26, tags=["gender:female"])
"""
self._report(metric, 'd', value, tags, sample_rate)

Expand All @@ -433,13 +433,13 @@ def timed(self, metric=None, tags=None, sample_rate=None, use_ms=None):
manager.
::

@statsd.timed('user.query.time', sample_rate=0.5)
@statsd.timed("user.query.time", sample_rate=0.5)
def get_user(user_id):
# Do what you need to ...
pass

# Is equivalent to ...
with statsd.timed('user.query.time', sample_rate=0.5):
with statsd.timed("user.query.time", sample_rate=0.5):
# Do what you need to ...
pass

Expand All @@ -448,7 +448,7 @@ def get_user(user_id):
try:
get_user(user_id)
finally:
statsd.timing('user.query.time', time.time() - start)
statsd.timing("user.query.time", time.time() - start)
"""
return TimedContextManagerDecorator(self, metric, tags, sample_rate, use_ms)

Expand All @@ -461,13 +461,13 @@ def distributed(self, metric=None, tags=None, sample_rate=None, use_ms=None):
The metric is required as a context manager.
::

@statsd.distributed('user.query.time', sample_rate=0.5)
@statsd.distributed("user.query.time", sample_rate=0.5)
def get_user(user_id):
# Do what you need to ...
pass

# Is equivalent to ...
with statsd.distributed('user.query.time', sample_rate=0.5):
with statsd.distributed("user.query.time", sample_rate=0.5):
# Do what you need to ...
pass

Expand All @@ -476,15 +476,15 @@ def get_user(user_id):
try:
get_user(user_id)
finally:
statsd.distribution('user.query.time', time.time() - start)
statsd.distribution("user.query.time", time.time() - start)
"""
return DistributedContextManagerDecorator(self, metric, tags, sample_rate, use_ms)

def set(self, metric, value, tags=None, sample_rate=None):
"""
Sample a set value.

>>> statsd.set('visitors.uniques', 999)
>>> statsd.set("visitors.uniques", 999)
"""
self._report(metric, 's', value, tags, sample_rate)

Expand Down Expand Up @@ -648,8 +648,8 @@ def event(self, title, text, alert_type=None, aggregation_key=None,
Send an event. Attributes are the same as the Event API.
http://docs.datadoghq.com/api/

>>> statsd.event('Man down!', 'This server needs assistance.')
>>> statsd.event('The web server restarted', 'The web server is up again', alert_type='success') # NOQA
>>> statsd.event("Man down!", "This server needs assistance.")
>>> statsd.event("The web server restarted", "The web server is up again", alert_type="success") # NOQA
"""
title = self._escape_event_content(title)
text = self._escape_event_content(text)
Expand Down Expand Up @@ -686,7 +686,7 @@ def service_check(self, check_name, status, tags=None, timestamp=None,
"""
Send a service check run.

>>> statsd.service_check('my_service.check_name', DogStatsd.WARNING)
>>> statsd.service_check("my_service.check_name", DogStatsd.WARNING)
"""
message = self._escape_service_check_message(message) if message is not None else ''

Expand Down
32 changes: 16 additions & 16 deletions datadog/threadstats/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,10 +92,10 @@ def start(self, flush_interval=10, roll_up_interval=10, device=None,
using datadog module ``initialize`` method.

>>> from datadog import initialize, ThreadStats
>>> initialize(api_key='my_api_key')
>>> initialize(api_key="my_api_key")
>>> stats = ThreadStats()
>>> stats.start()
>>> stats.increment('home.page.hits')
>>> stats.increment("home.page.hits")

:param flush_interval: The number of seconds to wait between flushes.
:type flush_interval: int
Expand Down Expand Up @@ -149,9 +149,9 @@ def event(self, title, text, alert_type=None, aggregation_key=None,
"""
Send an event. Attributes are the same as the Event API. (http://docs.datadoghq.com/api/)

>>> stats.event('Man down!', 'This server needs assistance.')
>>> stats.event('The web server restarted', \
'The web server is up again', alert_type='success')
>>> stats.event("Man down!", "This server needs assistance.")
>>> stats.event("The web server restarted", \
"The web server is up again", alert_type="success")
"""
if not self._disabled:
# Append all client level tags to every event
Expand All @@ -175,8 +175,8 @@ def gauge(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, ho
such as total hard disk space, process uptime, total number of active
users, or number of rows in a database table.

>>> stats.gauge('process.uptime', time.time() - process_start_time)
>>> stats.gauge('cache.bytes.free', cache.get_free_bytes(), tags=['version:1.0'])
>>> stats.gauge("process.uptime", time.time() - process_start_time)
>>> stats.gauge("cache.bytes.free", cache.get_free_bytes(), tags=["version:1.0"])
"""
if not self._disabled:
self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value, Gauge,
Expand All @@ -188,7 +188,7 @@ def set(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host
flushed as a gauge to Datadog. Optionally, specify a set of
tags to associate with the metric.

>>> stats.set('example_metric.set', "value_1", tags=['environment:dev'])
>>> stats.set("example_metric.set", "value_1", tags=["environment:dev"])
"""
if not self._disabled:
self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value, Set,
Expand All @@ -212,8 +212,8 @@ def decrement(self, metric_name, value=1, timestamp=None, tags=None, sample_rate
Decrement a counter, optionally setting a value, tags and a sample
rate.

>>> stats.decrement('files.remaining')
>>> stats.decrement('active.connections', 2)
>>> stats.decrement("files.remaining")
>>> stats.decrement("active.connections", 2)
"""
if not self._disabled:
self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), -value,
Expand All @@ -226,7 +226,7 @@ def histogram(self, metric_name, value, timestamp=None, tags=None, sample_rate=1
average, count and the 75/85/95/99 percentiles. Optionally, specify
a list of ``tags`` to associate with the metric.

>>> stats.histogram('uploaded_file.size', uploaded_file.size())
>>> stats.histogram("uploaded_file.size", uploaded_file.size())
"""
if not self._disabled:
self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value,
Expand All @@ -239,7 +239,7 @@ def distribution(self, metric_name, value, timestamp=None, tags=None, sample_rat
median, average, count and the 50/75/90/95/99 percentiles. Optionally,
specify a list of ``tags`` to associate with the metric.

>>> stats.distribution('uploaded_file.size', uploaded_file.size())
>>> stats.distribution("uploaded_file.size", uploaded_file.size())
"""
if not self._disabled:
self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value,
Expand All @@ -263,7 +263,7 @@ def timer(self, metric_name, sample_rate=1, tags=None, host=None):
::

def get_user(user_id):
with stats.timer('user.query.time'):
with stats.timer("user.query.time"):
# Do what you need to ...
pass

Expand All @@ -274,7 +274,7 @@ def get_user(user_id):
# Do what you need to ...
pass
finally:
stats.histogram('user.query.time', time.time() - start)
stats.histogram("user.query.time", time.time() - start)
"""
start = monotonic()
try:
Expand All @@ -290,7 +290,7 @@ def timed(self, metric_name, sample_rate=1, tags=None, host=None):
Optionally specify a list of tags to associate with the metric.
::

@stats.timed('user.query.time')
@stats.timed("user.query.time")
def get_user(user_id):
# Do what you need to ...
pass
Expand All @@ -300,7 +300,7 @@ def get_user(user_id):
try:
get_user(user_id)
finally:
stats.histogram('user.query.time', time.time() - start)
stats.histogram("user.query.time", time.time() - start)
"""

def wrapper(func):
Expand Down
Loading