Fixes problem with parser module on Windows (#4)

simon987 2018-04-14 12:58:04 -04:00
parent 3596829b06
commit a13f5a082a
7 changed files with 39 additions and 39 deletions


@@ -1,7 +1,6 @@
 import humanfriendly
 import datetime
 import json
-import operator
 
 
 class ReportBuilder:


@@ -1,5 +1,5 @@
 import requests
-from parser import NginxParser, ApacheParser
+from parsing import NginxParser, ApacheParser
 from crawl_report import ReportSaver, ReportBuilder
 import os
 
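This rename is the substance of the fix: CPython ships a stdlib "parser" module (deprecated in 3.9, removed in 3.10), and on builds where it is compiled into the interpreter, as in the python.org Windows distributions, built-in modules are resolved before anything on sys.path, so a project-local parser.py can never shadow it. A minimal sketch of how to check for the collision, runnable on any affected Python:

    import sys

    # Built-ins are resolved before sys.path, so if "parser" is compiled in,
    # "import parser" can never reach a local parser.py. Renaming the project
    # module to "parsing" sidesteps the collision entirely.
    if "parser" in sys.builtin_module_names:
        print("stdlib 'parser' is built in; a local parser.py would be shadowed")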


@@ -3,50 +3,52 @@
 from crawler import Crawler
 from crawl_report import ReportBuilder
 from reddit_bot import CommentBuilder
 
-if len(sys.argv) > 1:
-    command = sys.argv[1]
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        command = sys.argv[1]
 
-    if command == "crawl":
-        if len(sys.argv) > 2:
-            url = sys.argv[2]
-            c = Crawler(url, True)
-            c.crawl()
+        if command == "crawl":
+            if len(sys.argv) > 2:
+                url = sys.argv[2]
+                c = Crawler(url, True)
+                c.crawl()
 
-            print("Done")
-            r = ReportBuilder(c.files, url)
-            print(r.get_total_size_formatted())
+                print("Done")
+                r = ReportBuilder(c.files, url)
+                print(r.get_total_size_formatted())
 
-    if command == "mkreport":
-        if len(sys.argv) > 3:
-            url = sys.argv[2]
-            report_id = sys.argv[3]
-            c = Crawler(url, True)
-            c.crawl()
+        if command == "mkreport":
+            if len(sys.argv) > 3:
+                url = sys.argv[2]
+                report_id = sys.argv[3]
+                c = Crawler(url, True)
+                c.crawl()
 
-            print("Done")
-            r = ReportBuilder(c.files, url)
-            print(r.get_total_size_formatted())
-            c.store_report(report_id, "")
+                print("Done")
+                r = ReportBuilder(c.files, url)
+                print(r.get_total_size_formatted())
+                c.store_report(report_id, "")
 
-    if command == "getcomment":
-        if len(sys.argv) > 3:
-            url = sys.argv[2]
-            report_id = sys.argv[3]
-            c = Crawler(url, True)
-            c.crawl()
+        if command == "getcomment":
+            if len(sys.argv) > 3:
+                url = sys.argv[2]
+                report_id = sys.argv[3]
+                c = Crawler(url, True)
+                c.crawl()
 
-            print("Done")
-            r = ReportBuilder(c.files, url)
-            print(r.get_total_size_formatted())
-            com_buider = CommentBuilder(ReportBuilder(c.files, c.base_url), url, report_id)
-            print(com_buider.get_comment())
+                print("Done")
+                r = ReportBuilder(c.files, url)
+                print(r.get_total_size_formatted())
+                com_buider = CommentBuilder(ReportBuilder(c.files, c.base_url), url, report_id)
+                print(com_buider.get_comment())
 
-else:
-    print("Invalid argument count")
+    else:
+        print("Invalid argument count")

run.py

@@ -1,4 +1,3 @@
-import pickle
 from crawler import Crawler
 from reddit_bot import RedditBot, TaskQueue, CrawTask, CommentBuilder, ReportBuilder
 import time
@@ -6,7 +5,7 @@
 from multiprocessing import Process
 import praw
 
 reddit = praw.Reddit('opendirectories-bot',
                      user_agent='github.com/simon987/opendirectories-bot v1.0 (by /u/Hexahedr_n)')
 
 subreddit = reddit.subreddit("opendirectories")
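For context on the unchanged lines above: praw.Reddit('opendirectories-bot') reads the remaining credentials from an [opendirectories-bot] section of praw.ini. The explicit keyword form below is equivalent; every credential value is a placeholder, not taken from this repo:

    import praw

    reddit = praw.Reddit(client_id="PLACEHOLDER_ID",
                         client_secret="PLACEHOLDER_SECRET",
                         username="PLACEHOLDER_USER",
                         password="PLACEHOLDER_PASSWORD",
                         user_agent="github.com/simon987/opendirectories-bot v1.0 (by /u/Hexahedr_n)")
    subreddit = reddit.subreddit("opendirectories")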


@@ -1,6 +1,6 @@
 from unittest import TestCase
-from parser import ApacheParser, NginxParser
+from parsing import ApacheParser, NginxParser
 from crawler import Crawler
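The test modules only needed their imports updated. The parser constructors' signatures are not visible in this diff, so purely as a hypothetical illustration, a smoke test such as the following would have caught the Windows import failure without touching parser internals:

    from unittest import TestCase


    class ImportSmokeTest(TestCase):
        # Hypothetical test, not from the repo: it fails if "parsing" (or the
        # old "parser") resolves to a stdlib module instead of the project's.
        def test_parsers_importable(self):
            from parsing import ApacheParser, NginxParser
            self.assertTrue(callable(ApacheParser))
            self.assertTrue(callable(NginxParser))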


@@ -1,4 +1,4 @@
-from parser import NginxParser, PageParser, ApacheParser
+from parsing import NginxParser, PageParser, ApacheParser
 from unittest import TestCase