Mirror of https://github.com/simon987/opendirectories-bot.git
Synced 2025-11-04 07:36:51 +00:00

Commit a13f5a082a (parent 3596829b06)

    Fixes problem with parser module on windows (#4)
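The commit message does not spell out the failure, so the explanation below is inferred from the changes themselves: the project shipped a local parser.py, but "parser" was also a standard-library module in the Python versions this bot targeted, and on Windows CPython builds it is compiled into the interpreter itself. Built-in modules are resolved before anything on sys.path, so "from parser import NginxParser, ApacheParser" picked up the stdlib module on Windows and failed, while the same import happened to work on Linux. Renaming the local module to parsing sidesteps the collision. A minimal way to check for it, assuming a pre-3.10 interpreter (the stdlib parser module was removed in Python 3.10):

import sys

# Built-in modules are found by BuiltinImporter before sys.path is
# consulted, so a local parser.py can never shadow a built-in "parser".
# The expected output is an assumption: True on Windows builds of that
# era, typically False on Linux builds, where the local file wins.
print('parser' in sys.builtin_module_names)

The per-file changes follow.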
					
crawl_report.py (filename inferred: this is the module that defines ReportBuilder)

@@ -1,7 +1,6 @@
import humanfriendly
import datetime
import json
import operator


class ReportBuilder:

(The +/- markers for this hunk were lost in the mirror; the header records a net deletion of one line.)
crawler.py (filename inferred from the imports; not preserved in the mirror)

@@ -1,5 +1,5 @@
 import requests
-from parser import NginxParser, ApacheParser
+from parsing import NginxParser, ApacheParser
 from crawl_report import ReportSaver, ReportBuilder
 import os
 
manual.py (68 changed lines)

@@ -3,50 +3,52 @@ from crawler import Crawler
 from crawl_report import ReportBuilder
 from reddit_bot import CommentBuilder
 
-if len(sys.argv) > 1:
+if __name__ == "__main__":
 
-    command = sys.argv[1]
+    if len(sys.argv) > 1:
 
-    if command == "crawl":
-        if len(sys.argv) > 2:
-            url = sys.argv[2]
+        command = sys.argv[1]
 
-            c = Crawler(url, True)
-            c.crawl()
+        if command == "crawl":
+            if len(sys.argv) > 2:
+                url = sys.argv[2]
 
-            print("Done")
-            r = ReportBuilder(c.files, url)
-            print(r.get_total_size_formatted())
+                c = Crawler(url, True)
+                c.crawl()
 
-    if command == "mkreport":
-        if len(sys.argv) > 3:
-            url = sys.argv[2]
-            report_id = sys.argv[3]
+                print("Done")
+                r = ReportBuilder(c.files, url)
+                print(r.get_total_size_formatted())
 
-            c = Crawler(url, True)
-            c.crawl()
+        if command == "mkreport":
+            if len(sys.argv) > 3:
+                url = sys.argv[2]
+                report_id = sys.argv[3]
 
-            print("Done")
-            r = ReportBuilder(c.files, url)
-            print(r.get_total_size_formatted())
+                c = Crawler(url, True)
+                c.crawl()
 
-            c.store_report(report_id, "")
+                print("Done")
+                r = ReportBuilder(c.files, url)
+                print(r.get_total_size_formatted())
 
-    if command == "getcomment":
-        if len(sys.argv) > 3:
-            url = sys.argv[2]
-            report_id = sys.argv[3]
+                c.store_report(report_id, "")
 
-            c = Crawler(url, True)
-            c.crawl()
+        if command == "getcomment":
+            if len(sys.argv) > 3:
+                url = sys.argv[2]
+                report_id = sys.argv[3]
 
-            print("Done")
-            r = ReportBuilder(c.files, url)
-            print(r.get_total_size_formatted())
+                c = Crawler(url, True)
+                c.crawl()
 
-            com_buider = CommentBuilder(ReportBuilder(c.files, c.base_url), url, report_id)
-            print(com_buider.get_comment())
+                print("Done")
+                r = ReportBuilder(c.files, url)
+                print(r.get_total_size_formatted())
 
+                com_buider = CommentBuilder(ReportBuilder(c.files, c.base_url), url, report_id)
+                print(com_buider.get_comment())
 
+
-else:
-    print("Invalid argument count")
+    else:
+        print("Invalid argument count")
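Besides the import rename's extra indentation level, manual.py gains an if __name__ == "__main__": guard. The commit does not say why, but one plausible reason is Windows-specific: the bot uses multiprocessing (see run.py below), which has no fork() on Windows and starts workers by re-importing modules, so a module's CLI body must not run at import time. A minimal sketch of the failure the guard prevents, using a hypothetical work() function that is not part of this repo:

from multiprocessing import Process


def work():
    print("crawling...")


# On Windows, each child process re-imports this module. Without the
# guard, the child would reach Process(...).start() again and raise a
# RuntimeError instead of running work().
if __name__ == "__main__":
    p = Process(target=work)
    p.start()
    p.join()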
							
								
								
									
run.py (3 changed lines)

@@ -1,4 +1,3 @@
-import pickle
 from crawler import Crawler
 from reddit_bot import RedditBot, TaskQueue, CrawTask, CommentBuilder, ReportBuilder
 import time
@@ -6,7 +5,7 @@ from multiprocessing import Process
 import praw
 
 reddit = praw.Reddit('opendirectories-bot',
-                    user_agent='github.com/simon987/opendirectories-bot v1.0  (by /u/Hexahedr_n)')
+                     user_agent='github.com/simon987/opendirectories-bot v1.0  (by /u/Hexahedr_n)')
 
 subreddit = reddit.subreddit("opendirectories")
 
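The run.py changes are cleanups riding along with the rename: the unused import pickle is dropped, and the continuation line is re-indented to line up with the opening parenthesis of praw.Reddit(, the hanging-indent style PEP 8 recommends. The positional 'opendirectories-bot' argument is a praw site name, so the actual credentials live in the matching section of praw.ini rather than in this file.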
A test module (filename not preserved in the mirror)

@@ -1,6 +1,6 @@
 from unittest import TestCase
 
-from parser import ApacheParser, NginxParser
+from parsing import ApacheParser, NginxParser
 from crawler import Crawler
 
 
Another test module (filename not preserved in the mirror)

@@ -1,4 +1,4 @@
-from parser import NginxParser, PageParser, ApacheParser
+from parsing import NginxParser, PageParser, ApacheParser
 
 from unittest import TestCase
 
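Taken together, the import rewrites imply that parser.py itself was renamed to parsing.py in this commit, though the mirror's diffstat only preserves two filenames: manual.py (68 changed lines) and run.py (3).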