diff --git a/app.py b/app.py
index 289f094..f1dcf0b 100644
--- a/app.py
+++ b/app.py
@@ -224,7 +224,7 @@ def try_enqueue(url):
if not od_util.is_valid_url(url):
return "Error: Invalid url. Make sure to include the appropriate scheme.", "danger"
- if od_util.is_blacklisted(url):
+ if db.is_blacklisted(url):
return "Error: " \
"Sorry, this website has been blacklisted. If you think " \
@@ -326,12 +326,35 @@ def admin_dashboard():
if "username" in session:
tokens = db.get_tokens()
+ blacklist = db.get_blacklist()
- return render_template("dashboard.html", api_tokens=tokens)
+ return render_template("dashboard.html", api_tokens=tokens, blacklist=blacklist)
else:
return abort(403)
+@app.route("/blacklist/add", methods=["POST"])
+def admin_blacklist_add():
+ if "username" in session:
+
+ url = request.form.get("url")
+ db.add_blacklist_website(url)
+ flash("Added item to blacklist", "success")
+ return redirect("/dashboard")
+
+ else:
+ return abort(403)
+
+
+@app.route("/blacklist/<int:blacklist_id>/delete")
+def admin_blacklist_remove(blacklist_id):
+    if "username" in session:
+        db.remove_blacklist_website(blacklist_id)
+        flash("Removed blacklist item", "success")
+        return redirect("/dashboard")
+
+    else:
+        return abort(403)
@app.route("/generate_token", methods=["POST"])
def admin_generate_token():
if "username" in session:
diff --git a/database.py b/database.py
index 864cd15..78076c1 100644
--- a/database.py
+++ b/database.py
@@ -1,6 +1,6 @@
import sqlite3
import datetime
-import json
+from urllib.parse import urlparse
import os
import bcrypt
import uuid
@@ -10,6 +10,12 @@ class InvalidQueryException(Exception):
pass
+class BlacklistedWebsite:
+ def __init__(self, blacklist_id, url):
+ self.id = blacklist_id
+ self.netloc = url
+
+
class Website:
def __init__(self, url, logged_ip, logged_useragent, last_modified=None, website_id=None):
@@ -228,6 +234,43 @@ class Database:
yield doc
+ def add_blacklist_website(self, url):
+
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+ parsed_url = urlparse(url)
+ url = parsed_url.scheme + "://" + parsed_url.netloc
+ cursor.execute("INSERT INTO BlacklistedWebsite (url) VALUES (?)", (url, ))
+ conn.commit()
+
+ def remove_blacklist_website(self, blacklist_id):
+
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+
+ cursor.execute("DELETE FROM BlacklistedWebsite WHERE id=?", (blacklist_id, ))
+ conn.commit()
+
+ def is_blacklisted(self, url):
+
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+ parsed_url = urlparse(url)
+ url = parsed_url.scheme + "://" + parsed_url.netloc
+            # Exact match on the normalized scheme://netloc; LIKE would treat % and _ as wildcards
+            cursor.execute("SELECT id FROM BlacklistedWebsite WHERE url = ? LIMIT 1", (url, ))
+
+ return cursor.fetchone() is not None
+
+ def get_blacklist(self):
+
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+
+            cursor.execute("SELECT id, url FROM BlacklistedWebsite")
+ return [BlacklistedWebsite(r[0], r[1]) for r in cursor.fetchall()]
+
+
diff --git a/init_script.sql b/init_script.sql
index b52e948..9b24f92 100644
--- a/init_script.sql
+++ b/init_script.sql
@@ -18,3 +18,8 @@ CREATE TABLE ApiToken (
token TEXT PRIMARY KEY NOT NULL,
description TEXT
);
+
+CREATE TABLE BlacklistedWebsite (
+ id INTEGER PRIMARY KEY NOT NULL,
+ url TEXT
+);
diff --git a/od_util.py b/od_util.py
index 6193a3a..a14c872 100644
--- a/od_util.py
+++ b/od_util.py
@@ -4,7 +4,6 @@ from bs4 import BeautifulSoup
import os
import validators
import re
-import mimetypes
from ftplib import FTP
@@ -198,11 +197,3 @@ def is_od(url):
print(e)
return False
-
-def is_blacklisted(url):
- with open("blacklist.txt", "r") as f:
- for line in f.readlines():
- if url.startswith(line.strip()):
- return True
-
- return False
diff --git a/templates/dashboard.html b/templates/dashboard.html
index 0d3361f..efcd5bf 100644
--- a/templates/dashboard.html
+++ b/templates/dashboard.html
@@ -43,11 +43,42 @@
+
+
+ Blacklist
+
+
+
+                    <th>Netloc</th>
+                    <th>Action</th>
+
+
+
+ {% for item in blacklist %}
+
+                    <td>{{ item.netloc }}</td>
+                    <td><a href="/blacklist/{{ item.id }}/delete">Delete</a></td>
+
+ {% endfor %}
+
+
+
+
Misc actions
- Delete websites with no associated files that are not queued
+ Delete websites with no associated files that are
+ not queued
Logout