mirror of
https://github.com/simon987/chan_feed.git
synced 2025-04-10 14:06:42 +00:00
add 410chan
This commit is contained in:
parent
6b9db95637
commit
fb2f1419d8
14
chan/chan.py
14
chan/chan.py
@ -1,4 +1,5 @@
|
||||
from chan.alokal_json import AlokalJsonChanHelper
|
||||
from chan.chan410_html import Chan410HtmlChanHelper
|
||||
from chan.desuchan_html import DesuChanHtmlChanHelper
|
||||
from chan.doushio_html import DoushioHtmlChanHelper
|
||||
from chan.endchan_html import EndchanHtmlChanHelper
|
||||
@ -28,7 +29,7 @@ CHANS = {
|
||||
"news", "out", "po", "pol", "qst", "sci", "soc", "sp",
|
||||
"tg", "toy", "trv", "tv", "vp", "wsg", "wsr", "x"
|
||||
),
|
||||
rps=2
|
||||
rps=3 / 2
|
||||
),
|
||||
"lainchan": JsonChanHelper(
|
||||
2,
|
||||
@ -296,5 +297,16 @@ CHANS = {
|
||||
"test", "tlp", "tmp", "tv", "vg", "vipe", "wh", "xikkadvach", "ynet"
|
||||
),
|
||||
rps=1 / 5
|
||||
),
|
||||
"410chan": Chan410HtmlChanHelper(
|
||||
23,
|
||||
"http://410chan.org/",
|
||||
"http://410chan.org/",
|
||||
"/res/",
|
||||
"/src/",
|
||||
(
|
||||
"d", "b", "cu", "dev", "r", "a", "ts", "ci"
|
||||
),
|
||||
rps=1 / 60
|
||||
)
|
||||
}
|
||||
|
51
chan/chan410_html.py
Normal file
51
chan/chan410_html.py
Normal file
@ -0,0 +1,51 @@
|
||||
import datetime
|
||||
import re
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from chan.desuchan_html import DesuChanHtmlChanHelper
|
||||
|
||||
|
||||
def _ru_datefmt(text):
|
||||
return re.sub(r"\(.{2}\)", "", text)
|
||||
|
||||
|
||||
class Chan410HtmlChanHelper(DesuChanHtmlChanHelper):
    """Scraper helper for 410chan.org (kusaba-style HTML board).

    Reuses the request/board plumbing of DesuChanHtmlChanHelper and
    overrides only the HTML parsing: 410chan renders each thread as a
    ``<div id="thread<NUM>...">`` container and replies as ``div.reply``
    elements.
    """

    # Thread container ids look like "thread12345abc"; group(1) is the
    # numeric thread id.  Compiled once instead of per-element.
    _THREAD_ID_RE = re.compile(r"thread([0-9]+)[a-zA-Z]*")

    def parse_threads_list(self, r):
        """Parse a board index page.

        :param r: HTTP response whose body is the board index HTML.
        :return: tuple ``(threads, next_page)`` where each thread is a
            dict with ``id`` and ``omit`` (count of omitted posts), and
            ``next_page`` is always ``None`` (no pagination implemented).
        """
        soup = BeautifulSoup(r.content.decode('utf-8', 'ignore'), "html.parser")

        threads = []
        for thread_el in soup.find_all(
                "div", id=lambda tid: tid and self._THREAD_ID_RE.match(tid)):
            # "N posts omitted" banner; absent on threads short enough to
            # be shown in full.
            omit = thread_el.find("span", class_="omittedposts")
            threads.append({
                "id": int(self._THREAD_ID_RE.search(thread_el.get("id")).group(1)),
                "omit": int(omit.text.split(" ")[1]) if omit else 0
            })

        return threads, None

    @staticmethod
    def parse_thread(r):
        """Parse a single thread page.

        Yields one dict per reply (``type == "post"``), then one dict for
        the opening post (``type == "thread"``).  Replies are decomposed
        from the tree as they are emitted so that the final ``str(op_el)``
        contains only the OP's own markup.

        :param r: HTTP response whose body is the thread page HTML.
        """
        soup = BeautifulSoup(r.content.decode('utf-8', 'ignore'), "html.parser")

        op_el = soup.find("form", id="delform")

        for post_el in op_el.find_all("div", class_="reply"):
            # Bug fix: the original read op_el.find("span", class_="time"),
            # which is the FIRST time span in the whole form — the OP's
            # timestamp — for every reply.  Prefer the reply's own time
            # span; fall back to the old behavior if the reply lacks one.
            time_el = post_el.find("span", class_="time") \
                or op_el.find("span", class_="time")
            yield {
                # id attribute is "reply<NUM>"; strip the 5-char prefix.
                "id": int(post_el.get("id")[5:]),
                "type": "post",
                "html": str(post_el),
                "time": int(datetime.datetime.strptime(
                    _ru_datefmt(time_el.text),
                    "%d.%m.%Y %H:%M:%S").timestamp())
            }
            # Remove the reply so the OP's html below excludes replies.
            post_el.decompose()

        yield {
            # The OP's numeric id is carried by a purely numeric <a name>.
            "id": int(op_el.find(
                "a", attrs={"name": lambda x: x and x.isdigit()}).get("name")),
            "type": "thread",
            "html": str(op_el),
            # NOTE(review): strptime produces a naive datetime, so
            # .timestamp() interprets it in the scraper's local timezone —
            # presumably intentional project-wide; confirm against the
            # other helpers.
            "time": int(datetime.datetime.strptime(
                _ru_datefmt(op_el.find("span", class_="time").text),
                "%d.%m.%Y %H:%M:%S").timestamp())
        }
|
Loading…
x
Reference in New Issue
Block a user