This patch adds a `Collection.count_search_results()` method which
counts the results of a search.
It's much faster than counting the results of
`get_all_search_results()`, as we avoid creating Entry instances for
every result.
On a collection with 264 items, doing a search for '+', counting
all results with

    len(list(collection.get_all_search_results()))

takes ~930 ms on my system, while

    collection.count_search_results()

takes ~1.5 ms.
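
For context, a minimal usage sketch; it assumes a collection can be
opened by passing its directory to the `Collection()` constructor and
that the existing `start_search()` call is what populates
`self._enquire` before counting:

    from lesana.collection import Collection

    # Open the collection and run the same search for '+' as above.
    collection = Collection('.')
    collection.start_search('+')

    # Cheap: walks the Xapian msets and only counts matches.
    print(collection.count_search_results())

    # Slow: builds an Entry instance for every single match.
    print(len(list(collection.get_all_search_results())))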
---
lesana/collection.py | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/lesana/collection.py b/lesana/collection.py
index a3c6e22..00be506 100644
--- a/lesana/collection.py
+++ b/lesana/collection.py
@@ -414,6 +414,20 @@ class Collection(object):
             for match in mset:
                 yield self._match_to_entry(match)
             offset += pagesize
+
+    def count_search_results(self):
+        if not self._enquire:
+            return
+        count = 0
+        offset = 0
+        pagesize = 100
+        while True:
+            mset = self._enquire.get_mset(offset, pagesize)
+            if mset.size() == 0:
+                break
+            count += mset.size()
+            offset += pagesize
+        return count
 
     def get_all_documents(self):
         """
--
2.36.0