Fix memory usage problem when crawling: flush buffered docs in batches and only index a final non-empty remainder

This commit is contained in:
Simon
2018-06-14 23:36:54 -04:00
parent 9aed18c2d2
commit adb94cf326
6 changed files with 46 additions and 12 deletions

View File

@@ -107,7 +107,8 @@ class ElasticSearchEngine(SearchEngine):
if len(docs) >= import_every:
self._index(docs)
docs.clear()
self._index(docs)
if docs:
self._index(docs)
def _index(self, docs):
print("Indexing " + str(len(docs)) + " docs")