Website should stay online even if Elasticsearch is down / timing out

This commit is contained in:
Simon 2018-07-12 12:06:45 -04:00
parent f202caece8
commit d9e9f53f92
3 changed files with 24 additions and 13 deletions

14
app.py
View File

@ -288,6 +288,10 @@ def search():
except InvalidQueryException as e:
flash("<strong>Invalid query:</strong> " + str(e), "warning")
return redirect("/search")
except Exception:
flash("Query failed, this could mean that the search server is overloaded or is not reachable. "
"Please try again later", "danger")
hits = None
else:
hits = None
@ -313,9 +317,13 @@ def contribute():
@app.route("/")
@cache.cached(240)
def home():
    """Render the landing page.

    Gathers global index statistics and the list of websites currently
    being crawled. If the search backend (Elasticsearch) is down or times
    out, fall back to empty stats so the site stays online instead of
    returning a 500.
    """
    try:
        stats = searchEngine.get_global_stats()
        stats["website_count"] = len(db.get_all_websites())
        current_websites = ", ".join(task.url for task in taskDispatcher.get_current_tasks())
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; any backend failure degrades gracefully.
        stats = {}
        current_websites = None
    return render_template("home.html", stats=stats, current_websites=current_websites)

View File

@ -238,7 +238,7 @@ class ElasticSearchEngine(SearchEngine):
}
},
"size": 0
}, index=self.index_name, request_timeout=30)
}, index=self.index_name, request_timeout=20)
stats = dict()
stats["total_size"] = result["aggregations"]["total_size"]["value"]
@ -260,7 +260,7 @@ class ElasticSearchEngine(SearchEngine):
"website_id": website_id}
}
},
index=self.index_name, request_timeout=30)
index=self.index_name, request_timeout=20)
for hit in hits:
src = hit["_source"]
yield base_url + src["path"] + ("/" if src["path"] != "" else "") + src["name"] + \
@ -295,7 +295,7 @@ class ElasticSearchEngine(SearchEngine):
},
"size": 0
}, index=self.index_name, request_timeout=30)
}, index=self.index_name, request_timeout=20)
total_stats = self.es.search(body={
"query": {
@ -317,7 +317,7 @@ class ElasticSearchEngine(SearchEngine):
},
"size": 0
}, index=self.index_name, request_timeout=30)
}, index=self.index_name, request_timeout=20)
size_and_date_histogram = self.es.search(body={
"query": {
@ -352,7 +352,7 @@ class ElasticSearchEngine(SearchEngine):
}
},
"size": 0
}, index=self.index_name, request_timeout=30)
}, index=self.index_name, request_timeout=20)
website_scatter = self.es.search(body={
"query": {
@ -380,9 +380,9 @@ class ElasticSearchEngine(SearchEngine):
}
},
"size": 0
}, index=self.index_name, request_timeout=30)
}, index=self.index_name, request_timeout=20)
es_stats = self.es.indices.stats(self.index_name, request_timeout=30)
es_stats = self.es.indices.stats(self.index_name, request_timeout=20)
stats = dict()
stats["es_index_size"] = es_stats["indices"][self.index_name]["total"]["store"]["size_in_bytes"]

View File

@ -11,9 +11,12 @@
{% if stats and stats["total_size"] %}
<p class="lead">{{ stats["total_count"] }} files totalling
~{{ stats["total_size"] | filesizeformat }} from {{ stats["website_count"] }} websites</p>
{% endif %}
{% if current_websites %}
<p>Currently indexing <code>{{ current_websites }}</code><span class="vim-caret">&nbsp;</span> </p>
{% if current_websites %}
<p>Currently indexing <code>{{ current_websites }}</code><span class="vim-caret">&nbsp;</span> </p>
{% endif %}
{% else %}
<p class="lead">We're currently experiencing a high volume of traffic. The search function
may be unresponsive.</p>
{% endif %}
<p></p>
</div>