This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Add more logging to debug slow startup #8264

Merged
2 commits merged on Sep 7, 2020
1 change: 1 addition & 0 deletions changelog.d/8264.misc
@@ -0,0 +1 @@
Add more logging to debug slow startup.
23 changes: 16 additions & 7 deletions synapse/storage/databases/__init__.py
@@ -47,17 +47,24 @@ def __init__(self, main_store_class, hs):
engine = create_engine(database_config.config)

with make_conn(database_config, engine) as db_conn:
logger.info("Preparing database %r...", db_name)

logger.info("[database config %r]: Checking database server", db_name)
engine.check_database(db_conn)

logger.info(
"[database config %r]: Preparing for databases %r",
db_name,
database_config.databases,
)
prepare_database(
db_conn, engine, hs.config, databases=database_config.databases,
)

database = DatabasePool(hs, database_config, engine)

if "main" in database_config.databases:
logger.info("Starting 'main' data store")
logger.info(
"[database config %r]: Starting 'main' database", db_name
)

# Sanity check we don't try and configure the main store on
# multiple databases.
@@ -72,7 +79,9 @@ def __init__(self, main_store_class, hs):
persist_events = PersistEventsStore(hs, database, main)

if "state" in database_config.databases:
logger.info("Starting 'state' data store")
logger.info(
"[database config %r]: Starting 'state' database", db_name
)

# Sanity check we don't try and configure the state store on
# multiple databases.
@@ -85,7 +94,7 @@ def __init__(self, main_store_class, hs):

self.databases.append(database)

logger.info("Database %r prepared", db_name)
logger.info("[database config %r]: prepared", db_name)

# Closing the context manager doesn't close the connection.
# psycopg will close the connection when the object gets GCed, but *only*
@@ -98,10 +107,10 @@ def __init__(self, main_store_class, hs):

# Sanity check that we have actually configured all the required stores.
if not main:
raise Exception("No 'main' data store configured")
raise Exception("No 'main' database configured")

if not state:
raise Exception("No 'state' data store configured")
raise Exception("No 'state' database configured")

# We use local variables here to ensure that the databases do not have
# optional types.
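The new messages in this file all carry a "[database config %r]" prefix, so that when a deployment splits tables across several database configs, each startup step can be attributed to the config it belongs to. A minimal standalone sketch of the resulting output, reusing the format strings from this diff with a made-up db_name:

import logging

logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(message)s")
logger = logging.getLogger("synapse.storage.databases")

db_name = "master"  # hypothetical config name; Synapse takes this from the database config

# The same format strings added in this diff; %r renders the name quoted.
logger.info("[database config %r]: Checking database server", db_name)
logger.info("[database config %r]: Preparing for databases %r", db_name, ["main", "state"])
logger.info("[database config %r]: Starting 'main' database", db_name)
logger.info("[database config %r]: Starting 'state' database", db_name)
logger.info("[database config %r]: prepared", db_name)

Each line then reads e.g. "[database config 'master']: Checking database server" with a timestamp, which is what makes a slow step easy to spot in the startup log.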
1 change: 1 addition & 0 deletions synapse/storage/databases/main/__init__.py
@@ -591,6 +591,7 @@ def check_database_before_upgrade(cur, database_engine, config: HomeServerConfig
"""Called before upgrading an existing database to check that it is broadly sane
compared with the configuration.
"""
logger.info("Running sanity-checks on database...")
domain = config.server_name

sql = database_engine.convert_param_style(
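The diff is cut off before the query that check_database_before_upgrade actually runs; only the new log line and the start of the SQL construction are visible. Purely as an illustration of the kind of check the docstring describes (comparing the configured server_name against data already in the database), here is a rough sketch with hypothetical table and column names, not the real query:

import logging

logger = logging.getLogger(__name__)

def check_database_before_upgrade_sketch(cur, config):
    """Rough illustration only: verify existing rows match config.server_name."""
    logger.info("Running sanity-checks on database...")
    domain = config.server_name

    # Hypothetical query; the real SQL is not shown in this diff.
    cur.execute("SELECT name FROM users LIMIT 1")
    row = cur.fetchone()
    if row is not None and not row[0].endswith(":" + domain):
        raise Exception(
            "Database contains users that do not belong to server_name %s" % (domain,)
        )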
10 changes: 10 additions & 0 deletions synapse/storage/prepare_database.py
@@ -82,10 +82,18 @@ def prepare_database(db_conn, database_engine, config, databases=["main", "state

try:
cur = db_conn.cursor()

logger.info("%r: Checking existing schema version", databases)
version_info = _get_or_create_schema_state(cur, database_engine)

if version_info:
user_version, delta_files, upgraded = version_info
logger.info(
"%r: Existing schema is %i (+%i deltas)",
databases,
user_version,
len(delta_files),
)

# config should only be None when we are preparing an in-memory SQLite db,
# which should be empty.
@@ -111,6 +119,8 @@ def prepare_database(db_conn, database_engine, config, databases=["main", "state
databases=databases,
)
else:
logger.info("%r: Initialising new database", databases)

# if it's a worker app, refuse to upgrade the database, to avoid multiple
# workers doing it at once.
if config and config.worker_app is not None:
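With these additions, prepare_database reports the schema state it found before it starts applying any work, which narrows down where a slow startup is spending time. A self-contained sketch of how the logged message is built from the (user_version, delta_files, upgraded) tuple unpacked in the diff; the concrete values below are invented for illustration:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("synapse.storage.prepare_database")

databases = ["main", "state"]

# Invented example; shape follows the unpacking in the diff:
# (current schema version, delta files already applied, whether it was upgraded).
version_info = (58, ["58/01foo.sql", "58/02bar.sql"], True)

logger.info("%r: Checking existing schema version", databases)
if version_info:
    user_version, delta_files, upgraded = version_info
    logger.info(
        "%r: Existing schema is %i (+%i deltas)",
        databases,
        user_version,
        len(delta_files),
    )
else:
    logger.info("%r: Initialising new database", databases)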
5 changes: 5 additions & 0 deletions synapse/storage/util/id_generators.py
@@ -15,6 +15,7 @@

import contextlib
import heapq
import logging
import threading
from collections import deque
from typing import Dict, List, Set
@@ -24,6 +25,8 @@
from synapse.storage.database import DatabasePool, LoggingTransaction
from synapse.storage.util.sequence import PostgresSequenceGenerator

logger = logging.getLogger(__name__)


class IdGenerator:
def __init__(self, db_conn, table, column):
@@ -48,6 +51,8 @@ def _load_current_id(db_conn, table, column, step=1):
Returns:
int
"""
# debug logging for https://github.com/matrix-org/synapse/issues/7968
logger.info("initialising stream generator for %s(%s)", table, column)
cur = db_conn.cursor()
if step == 1:
cur.execute("SELECT MAX(%s) FROM %s" % (column, table))
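The log line added to _load_current_id means the MAX() scan it performs shows up with a timestamp during startup, which is the point of the slow-startup investigation in the linked issue. A simplified, standalone sketch of the pattern against an in-memory SQLite table; the table and column names are made up, the non-unit-step branch and the return handling are not shown in the diff and are assumptions here:

import logging
import sqlite3

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("synapse.storage.util.id_generators")

def load_current_id_sketch(db_conn, table, column, step=1):
    # debug logging for https://github.com/matrix-org/synapse/issues/7968
    logger.info("initialising stream generator for %s(%s)", table, column)
    cur = db_conn.cursor()
    if step == 1:
        cur.execute("SELECT MAX(%s) FROM %s" % (column, table))
    else:
        # This branch is cut off in the diff; mirroring the query with MIN()
        # is an assumption for illustration only.
        cur.execute("SELECT MIN(%s) FROM %s" % (column, table))
    (val,) = cur.fetchone()
    cur.close()
    return int(val) if val is not None else step

# Usage against a throwaway database (schema is hypothetical):
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE events (stream_ordering INTEGER)")
conn.executemany("INSERT INTO events VALUES (?)", [(41,), (42,)])
print(load_current_id_sketch(conn, "events", "stream_ordering"))  # -> 42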