docker-compose setup (wip)

This commit is contained in:
simon 2019-11-13 20:36:09 -05:00
parent df8ab7727b
commit 7f121d2ac0
5 changed files with 48 additions and 47 deletions

4
app.py
View File

@@ -1,6 +1,3 @@
import time
time.sleep(60)
from flask import Flask
import api
@@ -12,7 +9,6 @@ app = Flask(__name__)
app.secret_key = config.FLASK_SECRET
template_filters.setup_template_filters(app)
views.setup_views(app)
api.setup_api(app)

View File

@@ -1,4 +1,4 @@
version: "3"
version: "2.1"
services:
oddb:
image: simon987/od-database
@@ -23,9 +23,14 @@ services:
- "RECRAWL_POOL_SIZE=10000"
- "INDEXER_THREADS=2"
depends_on:
- wsb
- tt
- es
wsb:
condition: service_started
tt:
condition: service_started
oddb_db:
condition: service_healthy
es:
condition: service_healthy
restart: always
oddb_db:
image: postgres
@@ -71,22 +76,20 @@ services:
ports:
- 3010:80
depends_on:
- tt_db
tt_db:
condition: service_healthy
es:
image: docker.elastic.co/elasticsearch/elasticsearch:7.4.2
environment:
# - bootstrap.memory_lock=true
- discovery.type=single-node
# - index.number_of_shards=50
# - index.number_of_replicas=0
# - "ES_JAVA_OPTS=-Xms1G -Xmx10G"
- "ES_JAVA_OPTS=-Xms1G -Xmx10G"
volumes:
- /usr/share/elasticsearch/data
healthcheck:
test: ["CMD-SHELL", "curl --silent --fail localhost:9200/_cluster/health || exit 1"]
interval: 30s
timeout: 30s
retries: 3
interval: 5s
timeout: 5s
retries: 5

View File

@@ -2,9 +2,8 @@ import os
import time
from urllib.parse import urljoin
import ujson
import elasticsearch
import ujson
from apscheduler.schedulers.background import BackgroundScheduler
from elasticsearch import helpers
@@ -49,28 +48,29 @@ class ElasticSearchEngine:
logger.info("Elasticsearch first time setup")
if self.es.indices.exists(self.index_name):
self.es.indices.delete(index=self.index_name)
self.es.indices.create(index=self.index_name)
self.es.indices.close(index=self.index_name)
# Index settings
self.es.indices.put_settings(body={
"index": {
"refresh_interval": "30s",
"codec": "best_compression"
},
"analysis": {
"analyzer": {
"my_nGram": {
"tokenizer": "my_nGram_tokenizer",
"filter": ["lowercase", "asciifolding"]
}
self.es.indices.create(index=self.index_name, body={
"settings": {
"index": {
"number_of_shards": 50,
"number_of_replicas": 0,
"refresh_interval": "30s",
"codec": "best_compression"
},
"tokenizer": {
"my_nGram_tokenizer": {
"type": "nGram", "min_gram": 3, "max_gram": 3
"analysis": {
"analyzer": {
"my_nGram": {
"tokenizer": "my_nGram_tokenizer",
"filter": ["lowercase", "asciifolding"]
}
},
"tokenizer": {
"my_nGram_tokenizer": {
"type": "nGram", "min_gram": 3, "max_gram": 3
}
}
}
}}, index=self.index_name)
}
})
# Index Mappings
self.es.indices.put_mapping(body={

View File

@@ -26,12 +26,8 @@
<form action="/search" id="sfrm">
<div class="form-row">
<div class="col-md-11">
<input class="form-control" name="q" id="q" placeholder="Query">
</div>
<div class="col-md-1">
<input class="btn btn-primary btn-shadow" type="submit" value="Search">
</div>
<input class="form-control" style="max-width: calc(100% - 80px);" name="q" id="q" placeholder="Query">
<input class="btn btn-primary btn-shadow" type="submit" value="Search" style="margin-left: 3px">
</div>
{% if show_captcha %}
{{ captcha.get_code()|safe }}

View File

@@ -3,19 +3,24 @@ import os
from multiprocessing.pool import Pool
from urllib.parse import urlparse
from flask import render_template, redirect, request, flash, abort, Response, session
from flask_caching import Cache
import captcha
import config
import od_util
from common import db, taskManager, searchEngine, logger, require_role
from database import Website
from flask import render_template, redirect, request, flash, abort, Response, session
from flask_caching import Cache
from search.search import InvalidQueryException
from tasks import Task
def setup_views(app):
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cache = Cache(app, config={
"CACHE_TYPE": "redis",
"CACHE_REDIS_HOST": config.REDIS_HOST,
"CACHE_REDIS_PORT": config.REDIS_PORT,
})
@app.route("/dl")
@cache.cached(120)
@@ -207,7 +212,8 @@ def setup_views(app):
flash("Query failed, this could mean that the search server is overloaded or is not reachable. "
"Please try again later", "danger")
results = hits["hits"]["total"]["value"] if not isinstance(hits["hits"]["total"], int) else hits["hits"]["total"] if hits else -1
results = hits["hits"]["total"]["value"] if not isinstance(hits["hits"]["total"], int) else \
hits["hits"]["total"] if hits else -1
took = hits["took"] if hits else -1
forwarded_for = request.headers["X-Forwarded-For"] if "X-Forwarded-For" in request.headers else None