Skip to content

Commit

Permalink
Test search doesn't get confused if entries change during update_search().
Browse files Browse the repository at this point in the history

For #175.
  • Loading branch information
lemon24 committed Jul 2, 2020
1 parent 992d11b commit d4363f6
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 4 deletions.
8 changes: 4 additions & 4 deletions src/reader/_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,11 +136,15 @@ def __init__(self, db: sqlite3.Connection):
[], timedelta
] = lambda: Storage.recent_threshold

self.db.create_function('strip_html', 1, self.strip_html)
self.db.create_function('json_object_get', 2, json_object_get)

@property
def chunk_size(self) -> int:
    """Number of entries processed per batch during a search update.

    Delegates to ``self.get_chunk_size``, an injected callable, so the
    value can be swapped out (e.g. in tests) without subclassing.
    """
    return self.get_chunk_size()

ddl_transaction = staticmethod(ddl_transaction)
strip_html = staticmethod(strip_html)

@wrap_exceptions(SearchError)
def enable(self) -> None:
Expand Down Expand Up @@ -307,10 +311,6 @@ def _update(self) -> None:
f"original import error: {bs4_import_error}"
) from bs4_import_error

# TODO: is it ok to define the same function many times on the same connection?
self.db.create_function('strip_html', 1, strip_html)
self.db.create_function('json_object_get', 2, json_object_get)

# FIXME: how do we test pagination?
self._delete_from_search()
self._delete_from_sync_state()
Expand Down
64 changes: 64 additions & 0 deletions tests/test_reader_search.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import threading
from datetime import datetime

import pytest
Expand All @@ -9,9 +10,11 @@
from reader import EntrySearchResult
from reader import FeedNotFoundError
from reader import HighlightedString
from reader import make_reader
from reader import Reader
from reader import ReaderError
from reader import SearchNotEnabledError
from reader import StorageError


@pytest.fixture(params=[False, True], ids=['without_entries', 'with_entries'])
Expand Down Expand Up @@ -439,3 +442,64 @@ def test_search_entries_sort_error(reader):
reader.enable_search()
with pytest.raises(ValueError):
set(reader.search_entries('one', sort='bad sort'))


def test_update_search_entry_changed_during_update(db_path, monkeypatch):
    # This is a very intrusive test, maybe we should move it somewhere else.
    """Regression test for #175: search must not get confused if an entry
    changes while update_search() is running.

    A second reader runs update_search() in a background thread with its
    strip_html SQL function patched to block on an event, holding the
    search update open mid-read. The main thread then tries to update the
    same entry on a separate connection. Depending on SQLite locking, that
    concurrent feed update either succeeds (entry becomes 'three') or is
    rejected with StorageError (entry stays 'two'); either way, a final
    update_search() must leave the search index consistent with storage.
    """

    reader = make_reader(db_path)
    parser = reader._parser = Parser()

    # Initial feed/entry, title 'one'; index it so update_search() below
    # has existing state to reconcile.
    feed = parser.feed(1, datetime(2010, 1, 1), title='one')
    parser.entry(1, 1, datetime(2010, 1, 1), title='one')
    reader.add_feed(feed.url)
    reader.update_feeds()

    reader.enable_search()
    reader.update_search()

    # Change the entry to 'two' so the background update_search() has a
    # pending change to process when it starts.
    feed = parser.feed(1, datetime(2010, 1, 2), title='two')
    parser.entry(1, 1, datetime(2010, 1, 2), title='two')
    reader.update_feed(feed.url)

    # Rendezvous events: the first signals the background search update has
    # entered strip_html; the second releases it.
    in_strip_html = threading.Event()
    can_return_from_strip_html = threading.Event()

    def target():
        """Run update_search() on a second connection, with strip_html
        patched to block until the main thread allows it to return."""
        from reader._search import Search

        class MySearch(Search):
            @staticmethod
            def strip_html(*args, **kwargs):
                in_strip_html.set()
                can_return_from_strip_html.wait()
                return Search.strip_html(*args, **kwargs)

        # FIXME: remove monkeypatching when make_reader() gets a search_cls argument
        monkeypatch.setattr('reader.core.Search', MySearch)

        reader = make_reader(db_path)
        reader.update_search()

    thread = threading.Thread(target=target)
    thread.start()

    # Don't proceed until the background search update is blocked inside
    # strip_html (i.e. mid-way through reading entry data).
    in_strip_html.wait()

    try:
        # Attempt a concurrent change to 'three' while the search update is
        # still in flight.
        feed = parser.feed(1, datetime(2010, 1, 3), title='three')
        parser.entry(1, 1, datetime(2010, 1, 3), title='three')
        # Fail immediately instead of retrying if the other connection
        # holds a conflicting lock, so the test doesn't hang.
        reader._storage.db.execute("PRAGMA busy_timeout = 0;")
        reader.update_feed(feed.url)
        expected_title = 'three'
    except StorageError:
        # The concurrent update lost the race; the entry keeps 'two'.
        expected_title = 'two'
    finally:
        # Unblock the background search update and wait for it to finish.
        can_return_from_strip_html.set()
        thread.join()

    # A follow-up search update must reconcile whatever state resulted.
    reader.update_search()

    # Search results and storage must agree on the entry title, whichever
    # side won the race.
    (entry,) = reader.get_entries()
    (result,) = reader.search_entries('one OR two OR three')
    assert entry.title == result.metadata['.title'].value == expected_title

0 comments on commit d4363f6

Please sign in to comment.