Fixed bugs, enhanced parser

This commit is contained in:
simon
2018-02-05 22:05:07 -05:00
parent f3dc1445e4
commit 23775ec126
12 changed files with 459 additions and 80 deletions

@@ -1,4 +1,58 @@
import os
import json


class CrawTask:
    def __init__(self, url, post_id, title):
        self.url = url
        self.post_id = post_id
        self.post_title = title


class TaskQueue:
    def __init__(self, file):
        self.file = file
        self.tasks = []

        # Reload any tasks persisted by a previous run
        if os.path.isfile(self.file):
            with open(self.file, "r") as f:
                json_tasks = json.load(f)
                for task in json_tasks:
                    self.tasks.append(CrawTask(task["url"], task["post_id"], task["post_title"]))

    def push(self, task):
        self.tasks.append(task)
        self.update_file()

    def pop(self):
        # LIFO: returns the most recently pushed task, or None if empty
        if len(self.tasks) > 0:
            t = self.tasks.pop()
            self.update_file()
        else:
            t = None
        return t

    def update_file(self):
        # Persist the whole queue to disk after every mutation
        with open(self.file, "w") as f:
            json.dump(self.tasks, f, default=dumper)

    def is_queued(self, post_id):
        for task in self.tasks:
            if task.post_id == post_id:
                return True
        return False


def dumper(obj):
    # Lets json.dump serialize CrawTask instances as plain dicts
    return obj.__dict__
class RedditBot:
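
For context, a hypothetical usage sketch of the new queue (not part of the commit; the file name tasks.json and the post values are invented). Every push and pop rewrites the backing JSON file, so pending tasks survive a restart:

queue = TaskQueue("tasks.json")

# Skip posts that are already queued for crawling.
if not queue.is_queued("abc123"):
    queue.push(CrawTask("https://reddit.com/r/example/abc123", "abc123", "Example post"))

# A fresh TaskQueue pointed at the same file picks up where the
# previous run left off.
restored = TaskQueue("tasks.json")
task = restored.pop()  # returns None when the queue is empty
if task is not None:
    print(task.url, task.post_id, task.post_title)
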
@@ -11,8 +65,7 @@ class RedditBot:
             self.crawled = []
         else:
             with open(log_file, "r") as f:
-                self.crawled = f.read().split("\n")
-                self.crawled = list(filter(None, self.crawled))
+                self.crawled = list(filter(None, f.read().split("\n")))
 
     def log_crawl(self, post_id):
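
The RedditBot hunk folds the two log-parsing lines into one; the filter(None, ...) is what drops the trailing empty string that split("\n") produces on a newline-terminated log file. A quick illustration with invented post IDs:

log_contents = "7zyxu1\n7zz0qp\n"  # a crawl log typically ends with "\n"

print(log_contents.split("\n"))
# ['7zyxu1', '7zz0qp', ''] -- note the trailing empty string

print(list(filter(None, log_contents.split("\n"))))
# ['7zyxu1', '7zz0qp'] -- filter(None, ...) removes falsy items, i.e. ''
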