Added crawl logs page

Simon
2018-06-18 11:41:26 -04:00
parent 99d64b658b
commit 400abc9a3c
9 changed files with 105 additions and 8 deletions

View File

@@ -5,12 +5,13 @@ import sqlite3
 class TaskResult:
 
-    def __init__(self, status_code=None, file_count=0, start_time=0, end_time=0, website_id=0):
+    def __init__(self, status_code=None, file_count=0, start_time=0, end_time=0, website_id=0, indexed_time=0):
         self.status_code = status_code
         self.file_count = file_count
         self.start_time = start_time
         self.end_time = end_time
         self.website_id = website_id
+        self.indexed_time = indexed_time
 
     def to_json(self):
         return {
@@ -18,7 +19,8 @@ class TaskResult:
"file_count": self.file_count,
"start_time": self.start_time,
"end_time": self.end_time,
"website_id": self.website_id
"website_id": self.website_id,
"indexed_time": self.indexed_time
}
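
For context, a single entry serialized by to_json() now carries the extra indexed_time field and looks roughly like the snippet below. The values are made up for illustration (and the status_code key sits just above the lines shown in this hunk):

{
    "status_code": "200",
    "file_count": 1337,
    "start_time": 1529336486.0,
    "end_time": 1529336501.0,
    "website_id": 42,
    "indexed_time": 1529336530.0
}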
@@ -126,3 +128,12 @@ class TaskManagerDatabase:
             conn.commit()
             return [TaskResult(r[0], r[1], r[2], r[3], r[4]) for r in db_result]
+
+    def get_all_results(self):
+        with sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) as conn:
+            cursor = conn.cursor()
+
+            cursor.execute("SELECT website_id, status_code, file_count, start_time, end_time, indexed_time "
+                           "FROM TaskResult ORDER BY id ASC")
+            return [TaskResult(r[1], r[2], r[3].timestamp(), r[4].timestamp(), r[0], r[5].timestamp() if r[5] else None) for r in cursor.fetchall()]
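
Because the connection is opened with detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES, columns declared as TIMESTAMP (see the schema change further down in this commit) come back as datetime.datetime objects, which is why each row is converted with .timestamp() into a Unix epoch before being wrapped in TaskResult. A minimal standalone sketch of that round trip, using a throwaway in-memory table rather than the real TaskResult schema:

import sqlite3
from datetime import datetime

# Throwaway in-memory table; only the declared TIMESTAMP column type matters here.
conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute("CREATE TABLE demo (start_time TIMESTAMP)")
conn.execute("INSERT INTO demo VALUES (?)", (datetime(2018, 6, 18, 11, 41, 26),))

row = conn.execute("SELECT start_time FROM demo").fetchone()
print(type(row[0]))        # <class 'datetime.datetime'>, thanks to the declared-type converter
print(row[0].timestamp())  # Unix epoch float, the form handed to TaskResult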

View File

@@ -72,5 +72,13 @@ def get_file_list(website_id):
     return abort(404)
 
 
+@app.route("/task/logs/")
+@auth.login_required
+def get_task_logs():
+    json_str = json.dumps([result.to_json() for result in tm.get_all_results()])
+    return json_str
+
+
 if __name__ == "__main__":
     app.run(port=5001, host="0.0.0.0")
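
The new endpoint is protected by @auth.login_required, so a client has to authenticate. A rough sketch of fetching the crawl logs with the requests library, assuming HTTP basic auth; the host and credentials below are placeholders, not values from this project (the port matches the app.run() call above):

import requests

# Placeholder credentials; the real ones depend on how `auth` is configured.
resp = requests.get("http://localhost:5001/task/logs/", auth=("user", "password"))
resp.raise_for_status()

for entry in resp.json():
    print(entry["website_id"], entry["status_code"], entry["file_count"], entry["indexed_time"])

Since the view returns the JSON string directly rather than going through flask.jsonify, the response is served with Flask's default text/html mimetype; resp.json() still parses the body regardless of the content type.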

View File

@@ -13,7 +13,7 @@ CREATE TABLE TaskResult (
     website_id INT,
     status_code TEXT,
     file_count INT,
-    start_time INT,
-    end_time INT,
-    indexed_time INT DEFAULT NULL
+    start_time TIMESTAMP,
+    end_time TIMESTAMP,
+    indexed_time TIMESTAMP DEFAULT NULL
 );

View File

@@ -32,6 +32,9 @@ class TaskManager:
     def get_non_indexed_results(self):
         return self.db.get_non_indexed_results()
 
+    def get_all_results(self):
+        return self.db.get_all_results()
+
     def execute_queued_task(self):
 
         if len(self.current_tasks) <= self.max_processes: