Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Move things out of SQLBaseStore #6454

Merged
Merged 7 commits on Dec 4, 2019
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Move event fetch vars to EventsWorkerStore
  • Loading branch information
erikjohnston committed Dec 4, 2019
commit 6b2867096b8a2cf8afdb5de2bab93bbf31f76065
12 changes: 0 additions & 12 deletions synapse/storage/_base.py
Original file line number Diff line number Diff line change
@@ -18,7 +18,6 @@
import logging
import random
import sys
import threading
import time
from typing import Iterable, Tuple

@@ -36,7 +35,6 @@
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
from synapse.types import get_domain_from_id
from synapse.util import batch_iter
from synapse.util.caches.descriptors import Cache
from synapse.util.stringutils import exception_to_unicode

# import a function which will return a monotonic time, in seconds
@@ -237,16 +235,6 @@ def __init__(self, db_conn, hs):
# to watch it
self._txn_perf_counters = PerformanceCounters()

self._get_event_cache = Cache(
"*getEvent*", keylen=3, max_entries=hs.config.event_cache_size
)

self._event_fetch_lock = threading.Condition()
self._event_fetch_list = []
self._event_fetch_ongoing = 0

self._pending_ds = []

self.database_engine = hs.database_engine

# A set of tables that are not safe to use native upserts in.
2 changes: 1 addition & 1 deletion synapse/storage/data_stores/main/client_ips.py
Original file line number Diff line number Diff line change
@@ -21,7 +21,7 @@

from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage import background_updates
from synapse.storage._base import Cache
from synapse.util.caches.descriptors import Cache
from synapse.util.caches import CACHE_SIZE_FACTOR

logger = logging.getLogger(__name__)
2 changes: 1 addition & 1 deletion synapse/storage/data_stores/main/devices.py
Original file line number Diff line number Diff line change
@@ -31,11 +31,11 @@
)
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage._base import (
Cache,
SQLBaseStore,
db_to_json,
make_in_list_sql_clause,
)
from synapse.util.caches.descriptors import Cache
from synapse.storage.background_updates import BackgroundUpdateStore
from synapse.types import get_verify_key_from_cross_signing_key
from synapse.util import batch_iter
13 changes: 13 additions & 0 deletions synapse/storage/data_stores/main/events_worker.py
Original file line number Diff line number Diff line change
@@ -17,6 +17,7 @@

import itertools
import logging
import threading
from collections import namedtuple

from canonicaljson import json
@@ -34,6 +35,7 @@
from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
from synapse.types import get_domain_from_id
from synapse.util import batch_iter
from synapse.util.caches.descriptors import Cache
from synapse.util.metrics import Measure

logger = logging.getLogger(__name__)
@@ -53,6 +55,17 @@


class EventsWorkerStore(SQLBaseStore):
def __init__(self, db_conn, hs):
    """Initialise per-store event-fetch state.

    Sets up the in-memory event cache together with the condition
    variable and bookkeeping fields shared by the event-fetch
    machinery (presumably background fetcher threads — the consumers
    live elsewhere in this file).
    """
    super(EventsWorkerStore, self).__init__(db_conn, hs)

    # Cache of recently loaded events, keyed on a 3-tuple; capacity
    # comes from the homeserver config.
    self._get_event_cache = Cache(
        "*getEvent*",
        keylen=3,
        max_entries=hs.config.event_cache_size,
    )

    # Shared fetch state: count of in-flight fetches, the pending
    # request list, and the condition variable guarding both.
    self._event_fetch_ongoing = 0
    self._event_fetch_list = []
    self._event_fetch_lock = threading.Condition()

def get_received_ts(self, event_id):
"""Get received_ts (when it was persisted) for the event.