mirror of
https://github.com/simon987/od-database.git
synced 2025-04-20 02:46:45 +00:00
revert task queuing
parent 6e491513bf
commit 4ce807c8a0
app.py (8 lines changed)
@@ -604,6 +604,14 @@ def api_complete_task():
             filename = None
 
         taskManager.complete_task(filename, task, task_result, name)
 
+        if filename and os.path.exists(filename):
+            os.remove(filename)
+
+        # Handle task callback
+        callback = PostCrawlCallbackFactory.get_callback(task)
+        if callback:
+            callback.run(task_result, search)
+
         return "Successfully logged task result and indexed files"
     else:
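With the queue reverted, api_complete_task does the post-processing inline again: it hands the uploaded file list to the TaskManager, deletes the temporary file, and runs the post-crawl callback. A minimal sketch of that flow, pulled out into a helper for clarity (the helper name and parameter list are illustrative, not part of the codebase; `search` is assumed to be the module-level ElasticSearchEngine instance in app.py):

    import os

    from callbacks import PostCrawlCallbackFactory

    def finish_task(task_manager, search, task, task_result, filename, crawler_name):
        # Index the crawl results, then delete the uploaded file list.
        task_manager.complete_task(filename, task, task_result, crawler_name)
        if filename and os.path.exists(filename):
            os.remove(filename)

        # Handle task callback: run the post-crawl hook registered for this
        # task type, if any.
        callback = PostCrawlCallbackFactory.get_callback(task)
        if callback:
            callback.run(task_result, search)

        return "Successfully logged task result and indexed files"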
tasks.py (27 lines changed)
@@ -65,23 +65,7 @@ class TaskManager:
         self.search = ElasticSearchEngine("od-database")
         self.db = database.Database("db.sqlite3")
 
-        self.to_index_queue = []
-        self.scheduler = BackgroundScheduler()
-        self.scheduler.add_job(self._do_index, "interval", seconds=0.1, max_instances=2)
-        self.scheduler._logger.setLevel("ERROR")
-        self.scheduler.start()
-
     def complete_task(self, file_list, task, task_result, crawler_name):
 
-        self.to_index_queue.append((file_list, task, task_result, crawler_name))
-        logger.info("Queued tasks: " + str(len(self.to_index_queue)))
-
-    def _do_index(self):
-        if len(self.to_index_queue) == 0:
-            return
-
-        from callbacks import PostCrawlCallbackFactory
-
-        file_list, task, task_result, crawler_name = self.to_index_queue.pop()
         self.search.delete_docs(task_result.website_id)
 
@@ -101,16 +85,9 @@ class TaskManager:
 
         task_result.server_id = crawler_name
 
-        if file_list and os.path.exists(file_list):
-            os.remove(file_list)
-
-        # Handle task callback
-        callback = PostCrawlCallbackFactory.get_callback(task)
-        if callback:
-            callback.run(task_result, self.search)
-
         self.db.log_result(task_result)
 
     def queue_task(self, task: Task):
         self.db.put_task(task)
         print("Queued task and made it available to crawlers: " + str(task.website_id))
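For context, the queuing approach removed above buffered finished tasks in an in-memory list and drained it from an APScheduler interval job, so the HTTP request returned before indexing happened. A standalone sketch of that pattern, using the same BackgroundScheduler calls as the reverted code (the class and parameter names here are illustrative):

    from apscheduler.schedulers.background import BackgroundScheduler

    class IndexQueue:
        # Illustrative re-creation of the polling queue this commit removes.
        def __init__(self, index_fn):
            self.to_index_queue = []
            self.index_fn = index_fn

            # Poll every 100 ms; allow at most two overlapping indexing runs.
            self.scheduler = BackgroundScheduler()
            self.scheduler.add_job(self._do_index, "interval",
                                   seconds=0.1, max_instances=2)
            self.scheduler.start()

        def complete_task(self, file_list, task, task_result, crawler_name):
            # Defer the heavy indexing work to the background job.
            self.to_index_queue.append((file_list, task, task_result, crawler_name))

        def _do_index(self):
            if not self.to_index_queue:
                return
            self.index_fn(*self.to_index_queue.pop())

One practical difference: with the in-memory queue, anything still queued is lost if the process stops, whereas the synchronous path restored by this commit finishes indexing before the API call returns.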