# sentry.conf.py.example (forked from getsentry/self-hosted)
# This file is just Python, with a touch of Django, which means
# you can inherit and tweak settings to your heart's content.
from sentry.conf.server import *
import os.path
CONF_ROOT = os.path.dirname(__file__)
DATABASES = {
'default': {
'ENGINE': 'sentry.db.postgres',
'NAME': 'sentry',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'AUTOCOMMIT': True,
'ATOMIC_REQUESTS': False,
}
}
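# Hard-coding credentials here is fine for a quick start, but a common
# alternative is to read them from the environment instead. A minimal sketch,
# using hypothetical variable names (adjust to however your deployment injects
# secrets):
#
# DATABASES['default']['PASSWORD'] = os.environ.get('SENTRY_DB_PASSWORD', '')
# DATABASES['default']['HOST'] = os.environ.get('SENTRY_POSTGRES_HOST', '127.0.0.1')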
# You should not change this setting after your database has been created
# unless you have altered all schemas first
SENTRY_USE_BIG_INTS = True
# If you're expecting any kind of real traffic on Sentry, we highly recommend
# configuring the CACHES and Redis settings
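# Redis itself is typically configured through SENTRY_OPTIONS. A minimal
# single-node sketch, assuming your Sentry version exposes the
# 'redis.clusters' option (verify against your release before relying on it):
#
# SENTRY_OPTIONS['redis.clusters'] = {
#     'default': {
#         'hosts': {
#             0: {
#                 'host': '127.0.0.1',
#                 'port': 6379,
#             },
#         },
#     },
# }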
###########
# General #
###########
# Instruct Sentry that this install intends to be run by a single organization
# and thus various UI optimizations should be enabled.
SENTRY_SINGLE_ORGANIZATION = True
DEBUG = False
#########
# Cache #
#########
# Sentry currently utilizes two separate caching mechanisms: Django's CACHES
# and SENTRY_CACHE below. While CACHES is not a requirement, it will optimize
# several high-throughput patterns.
# If you wish to use memcached, install the dependencies and adjust the config
# as shown:
#
# pip install python-memcached
#
# CACHES = {
# 'default': {
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'LOCATION': ['127.0.0.1:11211'],
# }
# }
# A primary cache is required for things such as processing events
SENTRY_CACHE = 'sentry.cache.redis.RedisCache'
#########
# Queue #
#########
# See https://docs.sentry.io/on-premise/server/queue/ for more
# information on configuring your queue broker and workers. Sentry relies
# on a Python framework called Celery to manage queues.
BROKER_URL = 'redis://127.0.0.1:6379/0'
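# If your Redis broker requires authentication, the password can be embedded
# directly in the URL (hypothetical credentials shown):
#
# BROKER_URL = 'redis://:my-redis-password@127.0.0.1:6379/0'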
###############
# Rate Limits #
###############
# Rate limits apply to notification handlers and are enforced per-project
# automatically.
SENTRY_RATELIMITER = 'sentry.ratelimits.redis.RedisRateLimiter'
##################
# Update Buffers #
##################
# Buffers (combined with queueing) act as an intermediate layer between the
# database and the storage API. They will greatly improve efficiency when large
# numbers of the same event are sent to the API in a short amount of time.
# (read: if you send any kind of real data to Sentry, you should enable buffers)
SENTRY_BUFFER = 'sentry.buffer.redis.RedisBuffer'
##########
# Quotas #
##########
# Quotas allow you to rate limit individual projects or the Sentry install as
# a whole.
SENTRY_QUOTAS = 'sentry.quotas.redis.RedisQuota'
########
# TSDB #
########
# The TSDB is used for building charts as well as making things like per-rate
# alerts possible.
SENTRY_TSDB = 'sentry.tsdb.redis.RedisTSDB'
###########
# Digests #
###########
# The digest backend powers notification summaries.
SENTRY_DIGESTS = 'sentry.digests.backends.redis.RedisBackend'
##############
# Web Server #
##############
# If you're using a reverse SSL proxy, you should enable the X-Forwarded-Proto
# header and uncomment the following settings
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# If you're not hosting at the root of your web server, you need to uncomment
# FORCE_SCRIPT_NAME below and set it to the path where Sentry is hosted.
# FORCE_SCRIPT_NAME = '/sentry'
SENTRY_WEB_HOST = '0.0.0.0'
SENTRY_WEB_PORT = 9000
SENTRY_WEB_OPTIONS = {
# 'workers': 3, # the number of web workers
# 'protocol': 'uwsgi', # Enable uwsgi protocol instead of http
}
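# For production traffic you will usually want more than one web worker. A
# minimal sketch building on the 'workers' option hinted at above (tune the
# count to your available CPU cores):
#
# SENTRY_WEB_OPTIONS = {
#     'workers': 3,
# }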