Add per-provider-async searching with status updates
parent 89beb92866
commit 607e1a8299
@@ -44,6 +44,7 @@ import time
 from fuzzywuzzy import process as fuzzy
 import trio
+from trio_typing import TaskStatus
 from PyQt5 import QtCore, QtGui
 from PyQt5 import QtWidgets
 from PyQt5.QtCore import (
@@ -347,6 +348,21 @@ class CompleterView(QTreeView):
             # remove section as well
             # model.removeRow(i, QModelIndex())

+            if status_field is not None:
+                model.setItem(idx.row(), 1, QStandardItem(status_field))
+            else:
+                model.setItem(idx.row(), 1, QStandardItem())
+
+            # XXX: not idea how to use this
+            # model.setItemData(
+            #     idx,
+            #     {
+            #         0: 'cache',
+            #         1: 'searching',
+            #     }
+            # )
+            self.resize()
+
             return idx
         else:
             return None
@@ -368,15 +384,15 @@ class CompleterView(QTreeView):

         model.setHorizontalHeaderLabels(self.labels)


         section_idx = self.clear_section(section)

-        # for key, values in results.items():
+        # if we can't find a section start adding to the root
         if section_idx is None:
             root = model.invisibleRootItem()
             section_item = QStandardItem(section)
-            root.appendRow(section_item)
+            blank = QStandardItem('')
+            root.appendRow([section_item, blank])

         else:
             section_item = model.itemFromIndex(section_idx)
@@ -553,7 +569,11 @@ class SearchWidget(QtGui.QWidget):
         node = model.itemFromIndex(cidx.siblingAtColumn(1))
         if node:
             symbol = node.text()
-            provider = node.parent().text()
+            try:
+                provider = node.parent().text()
+            except AttributeError:
+                # no text set
+                return None

             # TODO: move this to somewhere non-search machinery specific?
             if provider == 'cache':
@@ -569,6 +589,47 @@ _search_active: trio.Event = trio.Event()
 _search_enabled: bool = False


+async def pack_matches(
+    view: CompleterView,
+    has_results: dict[str, set[str]],
+    matches: dict[(str, str), [str]],
+    provider: str,
+    pattern: str,
+    search: Callable[..., Awaitable[dict]],
+    task_status: TaskStatus[
+        trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+
+    log.info(f'Searching {provider} for "{pattern}"')
+    if provider != 'cache':
+        view.set_section_entries(
+            section=provider,
+            values=[],
+        )
+        view.clear_section(provider, status_field='-> searchin..')
+
+    else:  # for the cache just clear it's entries and don't put a status
+        view.clear_section(provider)
+
+    with trio.CancelScope() as cs:
+        task_status.started(cs)
+        # ensure ^ status is updated
+        results = await search(pattern)
+
+    if provider != 'cache':
+        matches[(provider, pattern)] = results
+
+        # print(f'results from {provider}: {results}')
+        has_results[pattern].add(provider)
+
+    if results:
+        view.set_section_entries(
+            section=provider,
+            values=results,
+        )
+
+
 async def fill_results(

     search: SearchBar,
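The core of the hunk above is trio's task_status handshake: nursery.start() blocks until the spawned task calls task_status.started(), and whatever value is passed to started(), here the child's cancel scope, becomes start()'s return value, so the spawner can later cancel a stale search. A minimal standalone sketch of that pattern (the cancellable_search coroutine and the 'xbtusd' pattern below are illustrative stand-ins, not piker code):

import trio


async def cancellable_search(
    pattern: str,
    task_status=trio.TASK_STATUS_IGNORED,
) -> None:
    # open a cancel scope and hand it back to the spawner *before* doing
    # the slow work, so the caller can cancel this search mid-flight
    with trio.CancelScope() as cs:
        task_status.started(cs)
        await trio.sleep(0.2)  # stand-in for `await search(pattern)`
        print(f'results for {pattern!r}')


async def main() -> None:
    async with trio.open_nursery() as n:
        # .start() waits for task_status.started() and returns its argument,
        # which here is the child's cancel scope
        cs = await n.start(cancellable_search, 'xbtusd')
        cs.cancel()  # e.g. a newer keystroke invalidates this search


trio.run(main)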
@@ -584,9 +645,7 @@ async def fill_results(
     completion results.

     """
-    global _search_active, _search_enabled
+    global _search_active, _search_enabled, _searcher_cache

-    multisearch = get_multi_search()
-
     bar = search.bar
     view = bar.view
@@ -596,6 +655,10 @@ async def fill_results(
     repeats = 0
     last_patt = None

+    # cache of prior patterns to search results
+    matches = defaultdict(list)
+    has_results: defaultdict[str, set[str]] = defaultdict(set)
+
     while True:
         await _search_active.wait()
         period = None
@@ -639,33 +702,47 @@ async def fill_results(

             log.debug(f'Search req for {text}')

-            # issue multi-provider fan-out search request
-            results = await multisearch(text, period=period)
+            already_has_results = has_results[text]

-            # matches = {}
-            # unmatches = []
+            # issue multi-provider fan-out search request and place
+            # "searching.." statuses on outstanding results providers
+            async with trio.open_nursery() as n:

-            if _search_enabled:
+                for provider, (search, pause) in _searcher_cache.items():
+                    print(provider)

-                for (provider, pattern), output in results.items():
-                    if output:
-                        # matches[provider] = output
-                        view.set_section_entries(
-                            section=provider,
-                            values=output,
+                    # TODO: put "searching..." status in result field
+                    if provider != 'cache':
+                        view.clear_section(
+                            provider, status_field='-> searchin..')
+
+                    # only conduct search on this backend if it's
+                    # registered for the corresponding pause period.
+                    if (period >= pause) and (
+                        provider not in already_has_results
+                    ):
+                        await n.start(
+                            pack_matches,
+                            view,
+                            has_results,
+                            matches,
+                            provider,
+                            text,
+                            search
                         )
+                    else:  # already has results for this input text
+                        results = matches[(provider, text)]
+                        if results:
+                            view.set_section_entries(
+                                section=provider,
+                                values=results,
+                            )
+                        else:
+                            view.clear_section(provider)

-                    else:
-                        view.clear_section(provider)
+            if last_patt is None or last_patt != text:
+                view.select_first()

-                if last_patt is None or last_patt != text:
-                    view.select_first()

-            # only change select on first search iteration,
-            # late results from other providers should **not**
-            # move the current selection
-            # if pattern not in patt_searched:
-            #     patt_searched[pattern].append(provider)

             last_patt = text
             bar.show()
@@ -698,7 +775,6 @@ async def handle_keyboard_input(
        partial(
            fill_results,
            search,
-            # multisearch,
            recv,
        )
    )
@@ -815,7 +891,6 @@ async def handle_keyboard_input(
             if parent_item and parent_item.text() == 'cache':
-
                 value = search.get_current_item()

                 if value is not None:
                     provider, symbol = value
                     chart.load_symbol(
@@ -851,47 +926,6 @@ async def search_simple_dict(
 _searcher_cache: Dict[str, Callable[..., Awaitable]] = {}


-def get_multi_search() -> Callable[..., Awaitable]:
-
-    global _searcher_cache
-
-    async def multisearcher(
-        pattern: str,
-        period: str,
-
-    ) -> dict:
-        # nonlocal matches
-        matches = {}
-
-        async def pack_matches(
-            provider: str,
-            pattern: str,
-            search: Callable[..., Awaitable[dict]],
-
-        ) -> None:
-
-            log.info(f'Searching {provider} for "{pattern}"')
-            results = await search(pattern)
-            # print(f'results from {provider}: {results}')
-            matches[(provider, pattern)] = results
-
-        # TODO: make this an async stream?
-        async with trio.open_nursery() as n:
-
-            for provider, (search, min_pause) in _searcher_cache.items():
-
-                # only conduct search on this backend if it's registered
-                # for the corresponding pause period.
-                if period >= min_pause and (provider, pattern) not in matches:
-                    # print(
-                    #     f'searching {provider} after {period} > {min_pause}')
-                    n.start_soon(pack_matches, provider, pattern, search)
-
-        return matches
-
-    return multisearcher
-
-
 @asynccontextmanager
 async def register_symbol_search(
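Taken together, the per-provider flow in the rewritten fill_results loop reduces to the following self-contained sketch. The provider names, pause values, fake_search endpoint and StubView class below are invented for illustration; the real code drives a Qt CompleterView and backend search endpoints registered in _searcher_cache, and uses the nursery.start() handshake shown earlier rather than start_soon():

from collections import defaultdict
import trio


class StubView:
    '''Stand-in for the Qt CompleterView: just prints what would be rendered.'''

    def clear_section(self, provider: str, status_field: str = None) -> None:
        print(f'{provider}: {status_field or "(cleared)"}')

    def set_section_entries(self, section: str, values: list) -> None:
        print(f'{section}: {values}')


async def fake_search(pattern: str) -> list:
    # stand-in for a backend symbol-search request
    await trio.sleep(0.1)
    return [f'{pattern}.{i}' for i in range(3)]


# maps provider name -> (search endpoint, min pause before querying it),
# mirroring the shape of the `_searcher_cache` registry
searchers = {
    'fast-provider': (fake_search, 0.0),
    'slow-provider': (fake_search, 0.5),
}


async def run_one(view, has_results, matches, provider, pattern, search):
    results = await search(pattern)
    matches[(provider, pattern)] = results
    has_results[pattern].add(provider)
    view.set_section_entries(section=provider, values=results)


async def fan_out(pattern: str, period: float) -> dict:
    view = StubView()
    matches: dict = {}
    has_results: defaultdict = defaultdict(set)

    async with trio.open_nursery() as n:
        for provider, (search, pause) in searchers.items():

            # mark the section as pending while its task runs
            view.clear_section(provider, status_field='-> searching..')

            # only query providers whose pause threshold has elapsed and
            # which have not already answered for this pattern
            if period >= pause and provider not in has_results[pattern]:
                n.start_soon(
                    run_one, view, has_results, matches,
                    provider, pattern, search,
                )

    return matches


print(trio.run(fan_out, 'xbt', 1.0))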