Mirror of https://github.com/simon987/sist2.git (synced 2025-12-12 15:08:53 +00:00)

Compare commits

26 Commits
| Author | SHA1 | Date |
|---|---|---|
| | ec518887ee | |
| | 0b0b7fe951 | |
| | ba863e4e6c | |
| | cbab4c2841 | |
| | 930361e78c | |
| | 92478ec47c | |
| | 0d81d7c43b | |
| | 9f175cb0f0 | |
| | 6225cf81de | |
| | d7058ab645 | |
| | 84958502b1 | |
| | a0b6eed037 | |
| | 06d6910151 | |
| | b99e4ddf13 | |
| | d14139ba44 | |
| | 13960337aa | |
| | 2596361af5 | |
| | 5a1a04629f | |
| | 242dd67416 | |
| | 54d902146a | |
| | 3b0ab3679a | |
| | 58ce0ef414 | |
| | f984baf7fd | |
| | ce242d1053 | |
| | 71deab7fa2 | |
| | b0462f9378 | |
@@ -1,11 +1,6 @@
FROM simon987/sist2-build as build
MAINTAINER simon987 <me@simon987.net>

ENV DEBIAN_FRONTEND=noninteractive

RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash
RUN apt update -y; apt install -y nodejs && rm -rf /var/lib/apt/lists/*

WORKDIR /build/

COPY scripts scripts

@@ -25,7 +20,6 @@ RUN strip build/sist2 || mv build/sist2_debug build/sist2

FROM --platform="linux/amd64" ubuntu@sha256:965fbcae990b0467ed5657caceaec165018ef44a4d2d46c7cdea80a9dff0d1ea

ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
@@ -1,6 +1,19 @@
FROM simon987/sist2-build-arm64 as build
MAINTAINER simon987 <me@simon987.net>

WORKDIR /build/

COPY scripts scripts
COPY schema schema
COPY CMakeLists.txt .
COPY third-party third-party
COPY src src
COPY sist2-vue sist2-vue
COPY sist2-admin sist2-admin

RUN cd sist2-vue/ && npm install && npm run build
RUN cd sist2-admin/frontend/ && npm install && npm run build

WORKDIR /build/
ADD . /build/
RUN mkdir build && cd build && cmake -DSIST_PLATFORM=arm64_linux_docker -DSIST_DEBUG_INFO=on -DSIST_DEBUG=off -DBUILD_TESTS=off -DCMAKE_TOOLCHAIN_FILE=/vcpkg/scripts/buildsystems/vcpkg.cmake ..
@@ -206,7 +206,7 @@ docker run --rm --entrypoint cat my-sist2-image /root/sist2 > sist2-x64-linux

3. Install vcpkg dependencies

```bash
vcpkg install curl[core,openssl] sqlite3[core,fts5] cpp-jwt pcre cjson brotli libarchive[core,bzip2,libxml2,lz4,lzma,lzo] pthread tesseract libxml2 libmupdf gtest mongoose libmagic libraw gumbo ffmpeg[core,avcodec,avformat,swscale,swresample,webp]
vcpkg install curl[core,openssl] sqlite3[core,fts5] cpp-jwt pcre cjson brotli libarchive[core,bzip2,libxml2,lz4,lzma,lzo] pthread tesseract libxml2 libmupdf gtest mongoose libmagic libraw gumbo ffmpeg[core,avcodec,avformat,swscale,swresample,webp,opus,mp3lame,vpx,ffprobe,zlib]
```

4. Build
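Note: the contents of step 4 fall outside this hunk. As a rough sketch only, the build step generally mirrors the CMake + vcpkg toolchain invocation used in the Dockerfiles above; the exact flags and the /vcpkg path are assumptions to adapt to your checkout:

```bash
# Sketch of step 4 (assumed flags; vcpkg location taken from the Dockerfile above)
mkdir build && cd build
cmake -DSIST_DEBUG=off -DBUILD_TESTS=off \
      -DCMAKE_TOOLCHAIN_FILE=/vcpkg/scripts/buildsystems/vcpkg.cmake ..
make -j"$(nproc)"
```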
@@ -4,6 +4,20 @@ VCPKG_ROOT="/vcpkg"

git submodule update --init --recursive

(
  cd sist2-vue/
  npm install
  npm run build
) &

(
  cd sist2-admin/frontend/
  npm install
  npm run build
) &

wait

mkdir build
(
  cd build
@@ -4,6 +4,20 @@ VCPKG_ROOT="/vcpkg"

git submodule update --init --recursive

(
  cd sist2-vue/
  npm install
  npm run build
) &

(
  cd sist2-admin/frontend/
  npm install
  npm run build
) &

wait

mkdir build
(
  cd build
sist2-admin/frontend/package-lock.json (generated, 2033 lines changed): file diff suppressed because it is too large.
@@ -1,14 +1,15 @@
|
||||
<template>
|
||||
<div id="app">
|
||||
<NavBar></NavBar>
|
||||
<b-container class="pt-4">
|
||||
<b-alert show dismissible variant="info">
|
||||
This is a beta version of sist2-admin. Please submit bug reports, usability issues and feature requests
|
||||
to the <a href="https://github.com/simon987/sist2/issues/new/choose" target="_blank">issue tracker on Github</a>. Thank you!
|
||||
</b-alert>
|
||||
<router-view/>
|
||||
</b-container>
|
||||
</div>
|
||||
<div id="app">
|
||||
<NavBar></NavBar>
|
||||
<b-container class="pt-4">
|
||||
<b-alert show dismissible variant="info">
|
||||
This is a beta version of sist2-admin. Please submit bug reports, usability issues and feature requests
|
||||
to the <a href="https://github.com/simon987/sist2/issues/new/choose" target="_blank">issue tracker on
|
||||
Github</a>. Thank you!
|
||||
</b-alert>
|
||||
<router-view/>
|
||||
</b-container>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
@@ -16,83 +17,87 @@ import NavBar from "@/components/NavBar";
|
||||
import Sist2AdminApi from "@/Sist2AdminApi";
|
||||
|
||||
export default {
|
||||
components: {NavBar},
|
||||
data() {
|
||||
return {
|
||||
socket: null
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
Sist2AdminApi.getSist2AdminInfo()
|
||||
.then(resp => this.$store.commit("setSist2AdminInfo", resp.data));
|
||||
this.$store.dispatch("loadBrowserSettings");
|
||||
this.connectNotifications();
|
||||
// this.socket.onclose = this.connectNotifications;
|
||||
},
|
||||
methods: {
|
||||
connectNotifications() {
|
||||
this.socket = new WebSocket(`ws://${window.location.host}/notifications`);
|
||||
this.socket.onopen = () => {
|
||||
this.socket.send("Hello from client");
|
||||
}
|
||||
|
||||
this.socket.onmessage = e => {
|
||||
const notification = JSON.parse(e.data);
|
||||
if (notification.message) {
|
||||
notification.messageString = this.$t(notification.message).toString();
|
||||
components: {NavBar},
|
||||
data() {
|
||||
return {
|
||||
socket: null
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
Sist2AdminApi.getSist2AdminInfo()
|
||||
.then(resp => this.$store.commit("setSist2AdminInfo", resp.data));
|
||||
this.$store.dispatch("loadBrowserSettings");
|
||||
this.connectNotifications();
|
||||
// this.socket.onclose = this.connectNotifications;
|
||||
},
|
||||
methods: {
|
||||
connectNotifications() {
|
||||
if (window.location.protocol === "https:") {
|
||||
this.socket = new WebSocket(`wss://${window.location.host}/notifications`);
|
||||
} else {
|
||||
this.socket = new WebSocket(`ws://${window.location.host}/notifications`);
|
||||
}
|
||||
this.socket.onopen = () => {
|
||||
this.socket.send("Hello from client");
|
||||
}
|
||||
|
||||
this.$store.dispatch("notify", notification)
|
||||
}
|
||||
this.socket.onmessage = e => {
|
||||
const notification = JSON.parse(e.data);
|
||||
if (notification.message) {
|
||||
notification.messageString = this.$t(notification.message).toString();
|
||||
}
|
||||
|
||||
this.$store.dispatch("notify", notification)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
html, body {
|
||||
height: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#app {
|
||||
/*font-family: Avenir, Helvetica, Arial, sans-serif;*/
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
/*text-align: center;*/
|
||||
color: #2c3e50;
|
||||
padding-bottom: 1em;
|
||||
min-height: 100%;
|
||||
/*font-family: Avenir, Helvetica, Arial, sans-serif;*/
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
/*text-align: center;*/
|
||||
color: #2c3e50;
|
||||
padding-bottom: 1em;
|
||||
min-height: 100%;
|
||||
}
|
||||
|
||||
.info-icon {
|
||||
width: 1rem;
|
||||
margin-right: 0.2rem;
|
||||
cursor: pointer;
|
||||
line-height: 1rem;
|
||||
height: 1rem;
|
||||
background-image: url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiB4PSIwcHgiIHk9IjBweCIKICAgICB2aWV3Qm94PSIwIDAgNDI2LjY2NyA0MjYuNjY3IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCA0MjYuNjY3IDQyNi42Njc7IiBmaWxsPSIjZmZmIj4KPGc+CiAgICA8Zz4KICAgICAgICA8Zz4KICAgICAgICAgICAgPHJlY3QgeD0iMTkyIiB5PSIxOTIiIHdpZHRoPSI0Mi42NjciIGhlaWdodD0iMTI4Ii8+CiAgICAgICAgICAgIDxwYXRoIGQ9Ik0yMTMuMzMzLDBDOTUuNDY3LDAsMCw5NS40NjcsMCwyMTMuMzMzczk1LjQ2NywyMTMuMzMzLDIxMy4zMzMsMjEzLjMzM1M0MjYuNjY3LDMzMS4yLDQyNi42NjcsMjEzLjMzMwogICAgICAgICAgICAgICAgUzMzMS4yLDAsMjEzLjMzMywweiBNMjEzLjMzMywzODRjLTk0LjA4LDAtMTcwLjY2Ny03Ni41ODctMTcwLjY2Ny0xNzAuNjY3UzExOS4yNTMsNDIuNjY3LDIxMy4zMzMsNDIuNjY3CiAgICAgICAgICAgICAgICBTMzg0LDExOS4yNTMsMzg0LDIxMy4zMzNTMzA3LjQxMywzODQsMjEzLjMzMywzODR6Ii8+CiAgICAgICAgICAgIDxyZWN0IHg9IjE5MiIgeT0iMTA2LjY2NyIgd2lkdGg9IjQyLjY2NyIgaGVpZ2h0PSI0Mi42NjciLz4KICAgICAgICA8L2c+CiAgICA8L2c+CjwvZz4KPC9zdmc+Cg==);
|
||||
filter: brightness(45%);
|
||||
display: block;
|
||||
width: 1rem;
|
||||
margin-right: 0.2rem;
|
||||
cursor: pointer;
|
||||
line-height: 1rem;
|
||||
height: 1rem;
|
||||
background-image: url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiB4PSIwcHgiIHk9IjBweCIKICAgICB2aWV3Qm94PSIwIDAgNDI2LjY2NyA0MjYuNjY3IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCA0MjYuNjY3IDQyNi42Njc7IiBmaWxsPSIjZmZmIj4KPGc+CiAgICA8Zz4KICAgICAgICA8Zz4KICAgICAgICAgICAgPHJlY3QgeD0iMTkyIiB5PSIxOTIiIHdpZHRoPSI0Mi42NjciIGhlaWdodD0iMTI4Ii8+CiAgICAgICAgICAgIDxwYXRoIGQ9Ik0yMTMuMzMzLDBDOTUuNDY3LDAsMCw5NS40NjcsMCwyMTMuMzMzczk1LjQ2NywyMTMuMzMzLDIxMy4zMzMsMjEzLjMzM1M0MjYuNjY3LDMzMS4yLDQyNi42NjcsMjEzLjMzMwogICAgICAgICAgICAgICAgUzMzMS4yLDAsMjEzLjMzMywweiBNMjEzLjMzMywzODRjLTk0LjA4LDAtMTcwLjY2Ny03Ni41ODctMTcwLjY2Ny0xNzAuNjY3UzExOS4yNTMsNDIuNjY3LDIxMy4zMzMsNDIuNjY3CiAgICAgICAgICAgICAgICBTMzg0LDExOS4yNTMsMzg0LDIxMy4zMzNTMzA3LjQxMywzODQsMjEzLjMzMywzODR6Ii8+CiAgICAgICAgICAgIDxyZWN0IHg9IjE5MiIgeT0iMTA2LjY2NyIgd2lkdGg9IjQyLjY2NyIgaGVpZ2h0PSI0Mi42NjciLz4KICAgICAgICA8L2c+CiAgICA8L2c+CjwvZz4KPC9zdmc+Cg==);
|
||||
filter: brightness(45%);
|
||||
display: block;
|
||||
}
|
||||
|
||||
.tabs {
|
||||
margin-top: 10px;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.modal-title {
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
@media screen and (min-width: 1500px) {
|
||||
.container {
|
||||
max-width: 1440px;
|
||||
}
|
||||
.container {
|
||||
max-width: 1440px;
|
||||
}
|
||||
}
|
||||
|
||||
label {
|
||||
margin-top: 0.5rem;
|
||||
margin-bottom: 0;
|
||||
margin-top: 0.5rem;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -7,15 +7,15 @@ class Sist2AdminApi {
    }

    getJobs() {
        return axios.get(`${this.baseUrl}/api/job/`);
        return axios.get(`${this.baseUrl}/api/job`);
    }

    getFrontends() {
        return axios.get(`${this.baseUrl}/api/frontend/`);
        return axios.get(`${this.baseUrl}/api/frontend`);
    }

    getTasks() {
        return axios.get(`${this.baseUrl}/api/task/`);
        return axios.get(`${this.baseUrl}/api/task`);
    }

    killTask(taskId) {

@@ -42,7 +42,7 @@ class Sist2AdminApi {
    }

    getSearchBackends() {
        return axios.get(`${this.baseUrl}/api/search_backend/`);
        return axios.get(`${this.baseUrl}/api/search_backend`);
    }

    deleteBackend(name) {

@@ -127,7 +127,7 @@ class Sist2AdminApi {
    }

    getSist2AdminInfo() {
        return axios.get(`${this.baseUrl}/api/`);
        return axios.get(`${this.baseUrl}/api`);
    }

    getLogsToDelete(jobName, n) {
@@ -1,168 +1,172 @@
|
||||
<template>
|
||||
<b-card>
|
||||
<b-card-body>
|
||||
<b-card>
|
||||
<b-card-body>
|
||||
|
||||
<h4 class="mb-3">{{ taskId }} {{ $t("logs") }}</h4>
|
||||
<h4 class="mb-3">{{ taskId }} {{ $t("logs") }}</h4>
|
||||
|
||||
<div v-if="$store.state.sist2AdminInfo">
|
||||
{{ $t("logFile") }}
|
||||
<code>{{ $store.state.sist2AdminInfo.logs_folder }}/sist2-{{ taskId }}.log</code>
|
||||
<br/>
|
||||
<br/>
|
||||
</div>
|
||||
<div v-if="$store.state.sist2AdminInfo">
|
||||
{{ $t("logFile") }}
|
||||
<code>{{ $store.state.sist2AdminInfo.logs_folder }}/sist2-{{ taskId }}.log</code>
|
||||
<br/>
|
||||
<br/>
|
||||
</div>
|
||||
|
||||
<b-row>
|
||||
<b-col>
|
||||
<span>{{ $t("logLevel") }}</span>
|
||||
<b-select :options="levels.slice(0, -1)" v-model="logLevel" @input="connect()"></b-select>
|
||||
</b-col>
|
||||
<b-col>
|
||||
<span>{{ $t("logMode") }}</span>
|
||||
<b-select :options="modeOptions" v-model="mode" @input="connect()"></b-select>
|
||||
</b-col>
|
||||
</b-row>
|
||||
<b-row>
|
||||
<b-col>
|
||||
<span>{{ $t("logLevel") }}</span>
|
||||
<b-select :options="levels.slice(0, -1)" v-model="logLevel" @input="connect()"></b-select>
|
||||
</b-col>
|
||||
<b-col>
|
||||
<span>{{ $t("logMode") }}</span>
|
||||
<b-select :options="modeOptions" v-model="mode" @input="connect()"></b-select>
|
||||
</b-col>
|
||||
</b-row>
|
||||
|
||||
<div id="log-tail-output" class="mt-3 ml-1"></div>
|
||||
<div id="log-tail-output" class="mt-3 ml-1"></div>
|
||||
|
||||
</b-card-body>
|
||||
</b-card>
|
||||
</b-card-body>
|
||||
</b-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
|
||||
export default {
|
||||
name: "Tail",
|
||||
data() {
|
||||
return {
|
||||
logLevel: "DEBUG",
|
||||
levels: ["DEBUG", "INFO", "WARNING", "ERROR", "ADMIN", "FATAL"],
|
||||
socket: null,
|
||||
mode: "follow",
|
||||
modeOptions: [
|
||||
{
|
||||
"text": this.$t('follow'),
|
||||
"value": "follow"
|
||||
},
|
||||
{
|
||||
"text": this.$t('wholeFile'),
|
||||
"value": "wholeFile"
|
||||
name: "Tail",
|
||||
data() {
|
||||
return {
|
||||
logLevel: "DEBUG",
|
||||
levels: ["DEBUG", "INFO", "WARNING", "ERROR", "ADMIN", "FATAL"],
|
||||
socket: null,
|
||||
mode: "follow",
|
||||
modeOptions: [
|
||||
{
|
||||
"text": this.$t('follow'),
|
||||
"value": "follow"
|
||||
},
|
||||
{
|
||||
"text": this.$t('wholeFile'),
|
||||
"value": "wholeFile"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
computed: {
|
||||
taskId: function () {
|
||||
return this.$route.params.taskId;
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
connect() {
|
||||
let lineCount = 0;
|
||||
const outputElem = document.getElementById("log-tail-output")
|
||||
outputElem.replaceChildren();
|
||||
if (this.socket !== null) {
|
||||
this.socket.close();
|
||||
}
|
||||
|
||||
const n = this.mode === "follow" ? 32 : 9999999999;
|
||||
if (window.location.protocol === "https:") {
|
||||
this.socket = new WebSocket(`wss://${window.location.host}/log/${this.taskId}?n=${n}`);
|
||||
} else {
|
||||
this.socket = new WebSocket(`ws://${window.location.host}/log/${this.taskId}?n=${n}`);
|
||||
}
|
||||
this.socket.onopen = () => {
|
||||
this.socket.send("Hello from client");
|
||||
}
|
||||
|
||||
this.socket.onmessage = e => {
|
||||
let message;
|
||||
try {
|
||||
message = JSON.parse(e.data);
|
||||
} catch {
|
||||
console.error(e.data)
|
||||
return;
|
||||
}
|
||||
|
||||
if ("ping" in message) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.level === undefined) {
|
||||
|
||||
if ("stderr" in message) {
|
||||
message.level = "ERROR";
|
||||
message.message = message["stderr"];
|
||||
} else {
|
||||
message.level = "ADMIN";
|
||||
message.message = message["sist2-admin"];
|
||||
}
|
||||
message.datetime = ""
|
||||
message.filepath = ""
|
||||
}
|
||||
|
||||
if (this.levels.indexOf(message.level) < this.levels.indexOf(this.logLevel)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const logLine = `${message.datetime} [${message.level} ${message.filepath}] ${message.message}`;
|
||||
|
||||
const span = document.createElement("span");
|
||||
span.setAttribute("class", message.level);
|
||||
span.appendChild(document.createTextNode(logLine));
|
||||
|
||||
outputElem.appendChild(span);
|
||||
lineCount += 1;
|
||||
|
||||
if (this.mode === "follow" && lineCount >= n) {
|
||||
outputElem.firstChild.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.connect()
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
taskId: function () {
|
||||
return this.$route.params.taskId;
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
connect() {
|
||||
let lineCount = 0;
|
||||
const outputElem = document.getElementById("log-tail-output")
|
||||
outputElem.replaceChildren();
|
||||
if (this.socket !== null) {
|
||||
this.socket.close();
|
||||
}
|
||||
|
||||
const n = this.mode === "follow" ? 32 : 9999999999;
|
||||
this.socket = new WebSocket(`ws://${window.location.host}/log/${this.taskId}?n=${n}`);
|
||||
this.socket.onopen = () => {
|
||||
this.socket.send("Hello from client");
|
||||
}
|
||||
|
||||
this.socket.onmessage = e => {
|
||||
let message;
|
||||
try {
|
||||
message = JSON.parse(e.data);
|
||||
} catch {
|
||||
console.error(e.data)
|
||||
return;
|
||||
}
|
||||
|
||||
if ("ping" in message) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.level === undefined) {
|
||||
|
||||
if ("stderr" in message) {
|
||||
message.level = "ERROR";
|
||||
message.message = message["stderr"];
|
||||
} else {
|
||||
message.level = "ADMIN";
|
||||
message.message = message["sist2-admin"];
|
||||
}
|
||||
message.datetime = ""
|
||||
message.filepath = ""
|
||||
}
|
||||
|
||||
if (this.levels.indexOf(message.level) < this.levels.indexOf(this.logLevel)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const logLine = `${message.datetime} [${message.level} ${message.filepath}] ${message.message}`;
|
||||
|
||||
const span = document.createElement("span");
|
||||
span.setAttribute("class", message.level);
|
||||
span.appendChild(document.createTextNode(logLine));
|
||||
|
||||
outputElem.appendChild(span);
|
||||
lineCount += 1;
|
||||
|
||||
if (this.mode === "follow" && lineCount >= n) {
|
||||
outputElem.firstChild.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.connect()
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<style>
|
||||
#log-tail-output span {
|
||||
display: block;
|
||||
display: block;
|
||||
}
|
||||
|
||||
span.DEBUG {
|
||||
color: #9E9E9E;
|
||||
color: #9E9E9E;
|
||||
}
|
||||
|
||||
span.WARNING {
|
||||
color: #FFB300;
|
||||
color: #FFB300;
|
||||
}
|
||||
|
||||
span.INFO {
|
||||
color: #039BE5;
|
||||
color: #039BE5;
|
||||
}
|
||||
|
||||
span.ERROR {
|
||||
color: #F4511E;
|
||||
color: #F4511E;
|
||||
}
|
||||
|
||||
span.FATAL {
|
||||
color: #F4511E;
|
||||
color: #F4511E;
|
||||
}
|
||||
|
||||
span.ADMIN {
|
||||
color: #ee05ff;
|
||||
color: #ee05ff;
|
||||
}
|
||||
|
||||
|
||||
#log-tail-output {
|
||||
font-size: 13px;
|
||||
font-family: monospace;
|
||||
font-size: 13px;
|
||||
font-family: monospace;
|
||||
|
||||
padding: 6px;
|
||||
background-color: #f5f5f5;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 4px;
|
||||
margin: 3px;
|
||||
white-space: pre;
|
||||
color: #000;
|
||||
overflow: hidden;
|
||||
padding: 6px;
|
||||
background-color: #f5f5f5;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 4px;
|
||||
margin: 3px;
|
||||
white-space: pre;
|
||||
color: #000;
|
||||
overflow: hidden;
|
||||
}
|
||||
</style>
|
||||
File diff suppressed because it is too large.
@@ -187,6 +187,7 @@ class Sist2Index:
    def name(self) -> str:
        return self._descriptor["name"]


class WebOptions(BaseModel):
    indices: List[str] = []

@@ -212,7 +213,8 @@ class WebOptions(BaseModel):
                f"--lang={self.lang}"]

        if search_backend.backend_type == SearchBackendType("sqlite"):
            args.append(f"--search-index={search_backend.search_index}")
            search_index_absolute = os.path.join(DATA_FOLDER, search_backend.search_index)
            args.append(f"--search-index={search_index_absolute}")
        else:
            args.append(f"--es-url={search_backend.es_url}")
            args.append(f"--es-index={search_backend.es_index}")

@@ -238,6 +240,7 @@ class WebOptions(BaseModel):

        return args


class Sist2:

    def __init__(self, bin_path: str, data_directory: str):

@@ -357,5 +360,3 @@ class Sist2:

        t_stdout.start()

        return proc.pid

@@ -65,8 +65,8 @@ def get_log_files_to_remove(db: PersistentState, job_name: str, n: int):
        if row["name"].endswith(f"[{job_name}]"):
            counter += 1

            if counter > n:
                to_remove.append(row)
        if counter > n:
            to_remove.append(row)

    return to_remove
sist2-vue/package-lock.json (generated, 1015 lines changed): file diff suppressed because it is too large.
@@ -13,7 +13,7 @@
    "axios": "^0.25.0",
    "bootstrap-vue": "^2.21.2",
    "core-js": "^3.6.5",
    "d3": "^7.8.4",
    "d3": "^5.6.1",
    "date-fns": "^2.21.3",
    "dom-to-image": "^2.6.0",
    "fslightbox-vue": "fslightbox-vue.tgz",
@@ -531,8 +531,8 @@ class Sist2Api {
            size: 0
        }).then(res => {
            const range = {
                min: res.aggregations.dateMin.value,
                max: res.aggregations.dateMax.value,
                min: res.aggregations.dateMin.value / 1000,
                max: res.aggregations.dateMax.value / 1000,
            }

            if (range.min == null) {
@@ -1,10 +1,10 @@
|
||||
<template>
|
||||
<div>
|
||||
<b-btn style="float:right;margin-bottom: 10px" @click="downloadTreemap()" variant="primary">
|
||||
{{ $t("download") }}
|
||||
</b-btn>
|
||||
<svg id="treemap"></svg>
|
||||
</div>
|
||||
<div>
|
||||
<b-btn style="float:right;margin-bottom: 10px" @click="downloadTreemap()" variant="primary">
|
||||
{{ $t("download") }}
|
||||
</b-btn>
|
||||
<svg id="treemap"></svg>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
@@ -16,252 +16,252 @@ import domtoimage from "dom-to-image";
|
||||
|
||||
|
||||
const TILING_MODES = {
|
||||
"squarify": d3.treemapSquarify,
|
||||
"binary": d3.treemapBinary,
|
||||
"sliceDice": d3.treemapSliceDice,
|
||||
"slice": d3.treemapSlice,
|
||||
"dice": d3.treemapDice,
|
||||
"squarify": d3.treemapSquarify,
|
||||
"binary": d3.treemapBinary,
|
||||
"sliceDice": d3.treemapSliceDice,
|
||||
"slice": d3.treemapSlice,
|
||||
"dice": d3.treemapDice,
|
||||
};
|
||||
|
||||
const COLORS = {
|
||||
"PuBuGn": d3.interpolatePuBuGn,
|
||||
"PuRd": d3.interpolatePuRd,
|
||||
"PuBu": d3.interpolatePuBu,
|
||||
"YlOrBr": d3.interpolateYlOrBr,
|
||||
"YlOrRd": d3.interpolateYlOrRd,
|
||||
"YlGn": d3.interpolateYlGn,
|
||||
"YlGnBu": d3.interpolateYlGnBu,
|
||||
"Plasma": d3.interpolatePlasma,
|
||||
"Magma": d3.interpolateMagma,
|
||||
"Inferno": d3.interpolateInferno,
|
||||
"Viridis": d3.interpolateViridis,
|
||||
"Turbo": d3.interpolateTurbo,
|
||||
"PuBuGn": d3.interpolatePuBuGn,
|
||||
"PuRd": d3.interpolatePuRd,
|
||||
"PuBu": d3.interpolatePuBu,
|
||||
"YlOrBr": d3.interpolateYlOrBr,
|
||||
"YlOrRd": d3.interpolateYlOrRd,
|
||||
"YlGn": d3.interpolateYlGn,
|
||||
"YlGnBu": d3.interpolateYlGnBu,
|
||||
"Plasma": d3.interpolatePlasma,
|
||||
"Magma": d3.interpolateMagma,
|
||||
"Inferno": d3.interpolateInferno,
|
||||
"Viridis": d3.interpolateViridis,
|
||||
"Turbo": d3.interpolateTurbo,
|
||||
};
|
||||
|
||||
const SIZES = {
|
||||
"small": [800, 600],
|
||||
"medium": [1300, 750],
|
||||
"large": [1900, 900],
|
||||
"x-large": [2800, 1700],
|
||||
"xx-large": [3600, 2000],
|
||||
"small": [800, 600],
|
||||
"medium": [1300, 750],
|
||||
"large": [1900, 900],
|
||||
"x-large": [2800, 1700],
|
||||
"xx-large": [3600, 2000],
|
||||
};
|
||||
|
||||
|
||||
const uids = {};
|
||||
|
||||
function uid(name) {
|
||||
let id = uids[name] || 0;
|
||||
uids[name] = id + 1;
|
||||
return name + id;
|
||||
let id = uids[name] || 0;
|
||||
uids[name] = id + 1;
|
||||
return name + id;
|
||||
}
|
||||
|
||||
function cascade(root, offset) {
|
||||
const x = new Map;
|
||||
const y = new Map;
|
||||
return root.eachAfter(d => {
|
||||
if (d.children && d.children.length !== 0) {
|
||||
x.set(d, 1 + d3.max(d.children, c => c.x1 === d.x1 - offset ? x.get(c) : NaN));
|
||||
y.set(d, 1 + d3.max(d.children, c => c.y1 === d.y1 - offset ? y.get(c) : NaN));
|
||||
} else {
|
||||
x.set(d, 0);
|
||||
y.set(d, 0);
|
||||
}
|
||||
}).eachBefore(d => {
|
||||
d.x1 -= 2 * offset * x.get(d);
|
||||
d.y1 -= 2 * offset * y.get(d);
|
||||
});
|
||||
const x = new Map;
|
||||
const y = new Map;
|
||||
return root.eachAfter(d => {
|
||||
if (d.children && d.children.length !== 0) {
|
||||
x.set(d, 1 + d3.max(d.children, c => c.x1 === d.x1 - offset ? x.get(c) : NaN));
|
||||
y.set(d, 1 + d3.max(d.children, c => c.y1 === d.y1 - offset ? y.get(c) : NaN));
|
||||
} else {
|
||||
x.set(d, 0);
|
||||
y.set(d, 0);
|
||||
}
|
||||
}).eachBefore(d => {
|
||||
d.x1 -= 2 * offset * x.get(d);
|
||||
d.y1 -= 2 * offset * y.get(d);
|
||||
});
|
||||
}
|
||||
|
||||
function cascadeTreemap(data, svg, width, height, tilingMode, treemapColor) {
|
||||
const root = cascade(
|
||||
d3.treemap()
|
||||
.size([width, height])
|
||||
.tile(TILING_MODES[tilingMode])
|
||||
.paddingOuter(3)
|
||||
.paddingTop(16)
|
||||
.paddingInner(1)
|
||||
.round(true)(
|
||||
d3.hierarchy(data)
|
||||
.sum(d => d.value)
|
||||
.sort((a, b) => b.value - a.value)
|
||||
),
|
||||
3 // treemap.paddingOuter
|
||||
);
|
||||
const root = cascade(
|
||||
d3.treemap()
|
||||
.size([width, height])
|
||||
.tile(TILING_MODES[tilingMode])
|
||||
.paddingOuter(3)
|
||||
.paddingTop(16)
|
||||
.paddingInner(1)
|
||||
.round(true)(
|
||||
d3.hierarchy(data)
|
||||
.sum(d => d.value)
|
||||
.sort((a, b) => b.value - a.value)
|
||||
),
|
||||
3 // treemap.paddingOuter
|
||||
);
|
||||
|
||||
const maxDepth = Math.max(...root.descendants().map(d => d.depth));
|
||||
const color = d3.scaleSequential([maxDepth, -1], COLORS[treemapColor]);
|
||||
const maxDepth = Math.max(...root.descendants().map(d => d.depth));
|
||||
const color = d3.scaleSequential([maxDepth, -1], COLORS[treemapColor]);
|
||||
|
||||
svg.append("filter")
|
||||
.attr("id", "shadow")
|
||||
.append("feDropShadow")
|
||||
.attr("flood-opacity", 0.3)
|
||||
.attr("dx", 0)
|
||||
.attr("stdDeviation", 3);
|
||||
svg.append("filter")
|
||||
.attr("id", "shadow")
|
||||
.append("feDropShadow")
|
||||
.attr("flood-opacity", 0.3)
|
||||
.attr("dx", 0)
|
||||
.attr("stdDeviation", 3);
|
||||
|
||||
const node = svg.selectAll("g")
|
||||
.data(
|
||||
d3.nest()
|
||||
.key(d => d.depth).sortKeys(d3.ascending)
|
||||
.entries(root.descendants())
|
||||
)
|
||||
.join("g")
|
||||
.attr("filter", "url(#shadow)")
|
||||
.selectAll("g")
|
||||
.data(d => d.values)
|
||||
.join("g")
|
||||
.attr("transform", d => `translate(${d.x0},${d.y0})`);
|
||||
const node = svg.selectAll("g")
|
||||
.data(
|
||||
d3.nest()
|
||||
.key(d => d.depth).sortKeys(d3.ascending)
|
||||
.entries(root.descendants())
|
||||
)
|
||||
.join("g")
|
||||
.attr("filter", "url(#shadow)")
|
||||
.selectAll("g")
|
||||
.data(d => d.values)
|
||||
.join("g")
|
||||
.attr("transform", d => `translate(${d.x0},${d.y0})`);
|
||||
|
||||
node.append("title")
|
||||
.text(d => `${d.ancestors().reverse().splice(1).map(d => d.data.name).join("/")}\n${humanFileSize(d.value)}`);
|
||||
node.append("title")
|
||||
.text(d => `${d.ancestors().reverse().splice(1).map(d => d.data.name).join("/")}\n${humanFileSize(d.value)}`);
|
||||
|
||||
node.append("rect")
|
||||
.attr("id", d => (d.nodeUid = uid("node")))
|
||||
.attr("fill", d => color(d.depth))
|
||||
.attr("width", d => d.x1 - d.x0)
|
||||
.attr("height", d => d.y1 - d.y0);
|
||||
node.append("rect")
|
||||
.attr("id", d => (d.nodeUid = uid("node")))
|
||||
.attr("fill", d => color(d.depth))
|
||||
.attr("width", d => d.x1 - d.x0)
|
||||
.attr("height", d => d.y1 - d.y0);
|
||||
|
||||
node.append("clipPath")
|
||||
.attr("id", d => (d.clipUid = uid("clip")))
|
||||
.append("use")
|
||||
.attr("href", d => `#${d.nodeUid}`);
|
||||
node.append("clipPath")
|
||||
.attr("id", d => (d.clipUid = uid("clip")))
|
||||
.append("use")
|
||||
.attr("href", d => `#${d.nodeUid}`);
|
||||
|
||||
node.append("text")
|
||||
.attr("fill", d => d3.hsl(color(d.depth)).l > .5 ? "#333" : "#eee")
|
||||
.attr("clip-path", d => `url(#${d.clipUid})`)
|
||||
.selectAll("tspan")
|
||||
.data(d => [d.data.name, humanFileSize(d.value)])
|
||||
.join("tspan")
|
||||
.text(d => d);
|
||||
node.append("text")
|
||||
.attr("fill", d => d3.hsl(color(d.depth)).l > .5 ? "#333" : "#eee")
|
||||
.attr("clip-path", d => `url(#${d.clipUid})`)
|
||||
.selectAll("tspan")
|
||||
.data(d => [d.data.name, humanFileSize(d.value)])
|
||||
.join("tspan")
|
||||
.text(d => d);
|
||||
|
||||
node.filter(d => d.children).selectAll("tspan")
|
||||
.attr("dx", 3)
|
||||
.attr("y", 13);
|
||||
node.filter(d => d.children).selectAll("tspan")
|
||||
.attr("dx", 3)
|
||||
.attr("y", 13);
|
||||
|
||||
node.filter(d => !d.children).selectAll("tspan")
|
||||
.attr("x", 3)
|
||||
.attr("y", (d, i) => `${i === 0 ? 1.1 : 2.3}em`);
|
||||
node.filter(d => !d.children).selectAll("tspan")
|
||||
.attr("x", 3)
|
||||
.attr("y", (d, i) => `${i === 0 ? 1.1 : 2.3}em`);
|
||||
}
|
||||
|
||||
function flatTreemap(data, svg, width, height, groupingDepth, tilingMode, fillOpacity) {
|
||||
const ordinalColor = d3.scaleOrdinal(d3.schemeCategory10);
|
||||
const ordinalColor = d3.scaleOrdinal(d3.schemeCategory10);
|
||||
|
||||
const root = d3.treemap()
|
||||
.tile(TILING_MODES[tilingMode])
|
||||
.size([width, height])
|
||||
.padding(1)
|
||||
.round(true)(
|
||||
d3.hierarchy(data)
|
||||
.sum(d => d.value)
|
||||
.sort((a, b) => b.value - a.value)
|
||||
);
|
||||
const root = d3.treemap()
|
||||
.tile(TILING_MODES[tilingMode])
|
||||
.size([width, height])
|
||||
.padding(1)
|
||||
.round(true)(
|
||||
d3.hierarchy(data)
|
||||
.sum(d => d.value)
|
||||
.sort((a, b) => b.value - a.value)
|
||||
);
|
||||
|
||||
const leaf = svg.selectAll("g")
|
||||
.data(root.leaves())
|
||||
.join("g")
|
||||
.attr("transform", d => `translate(${d.x0},${d.y0})`);
|
||||
const leaf = svg.selectAll("g")
|
||||
.data(root.leaves())
|
||||
.join("g")
|
||||
.attr("transform", d => `translate(${d.x0},${d.y0})`);
|
||||
|
||||
leaf.append("title")
|
||||
.text(d => `${d.ancestors().reverse().map(d => d.data.name).join("/")}\n${humanFileSize(d.value)}`);
|
||||
leaf.append("title")
|
||||
.text(d => `${d.ancestors().reverse().map(d => d.data.name).join("/")}\n${humanFileSize(d.value)}`);
|
||||
|
||||
leaf.append("rect")
|
||||
.attr("id", d => (d.leafUid = uid("leaf")))
|
||||
.attr("fill", d => {
|
||||
while (d.depth > groupingDepth) d = d.parent;
|
||||
return ordinalColor(d.data.name);
|
||||
})
|
||||
.attr("fill-opacity", fillOpacity)
|
||||
.attr("width", d => d.x1 - d.x0)
|
||||
.attr("height", d => d.y1 - d.y0);
|
||||
leaf.append("rect")
|
||||
.attr("id", d => (d.leafUid = uid("leaf")))
|
||||
.attr("fill", d => {
|
||||
while (d.depth > groupingDepth) d = d.parent;
|
||||
return ordinalColor(d.data.name);
|
||||
})
|
||||
.attr("fill-opacity", fillOpacity)
|
||||
.attr("width", d => d.x1 - d.x0)
|
||||
.attr("height", d => d.y1 - d.y0);
|
||||
|
||||
leaf.append("clipPath")
|
||||
.attr("id", d => (d.clipUid = uid("clip")))
|
||||
.append("use")
|
||||
.attr("href", d => `#${d.leafUid}`);
|
||||
leaf.append("clipPath")
|
||||
.attr("id", d => (d.clipUid = uid("clip")))
|
||||
.append("use")
|
||||
.attr("href", d => `#${d.leafUid}`);
|
||||
|
||||
leaf.append("text")
|
||||
.attr("clip-path", d => `url(#${d.clipUid})`)
|
||||
.selectAll("tspan")
|
||||
.data(d => {
|
||||
if (d.data.name === ".") {
|
||||
d = d.parent;
|
||||
}
|
||||
return [d.data.name, humanFileSize(d.value)]
|
||||
})
|
||||
.join("tspan")
|
||||
.attr("x", 2)
|
||||
.attr("y", (d, i) => `${i === 0 ? 1.1 : 2.3}em`)
|
||||
.text(d => d);
|
||||
leaf.append("text")
|
||||
.attr("clip-path", d => `url(#${d.clipUid})`)
|
||||
.selectAll("tspan")
|
||||
.data(d => {
|
||||
if (d.data.name === ".") {
|
||||
d = d.parent;
|
||||
}
|
||||
return [d.data.name, humanFileSize(d.value)]
|
||||
})
|
||||
.join("tspan")
|
||||
.attr("x", 2)
|
||||
.attr("y", (d, i) => `${i === 0 ? 1.1 : 2.3}em`)
|
||||
.text(d => d);
|
||||
}
|
||||
|
||||
function exportTreemap(indexName, width, height) {
|
||||
domtoimage.toBlob(document.getElementById("treemap"), {width: width, height: height})
|
||||
.then(function (blob) {
|
||||
let a = document.createElement("a");
|
||||
let url = URL.createObjectURL(blob);
|
||||
domtoimage.toBlob(document.getElementById("treemap"), {width: width, height: height})
|
||||
.then(function (blob) {
|
||||
let a = document.createElement("a");
|
||||
let url = URL.createObjectURL(blob);
|
||||
|
||||
a.href = url;
|
||||
a.download = `${indexName}_treemap.png`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
setTimeout(function () {
|
||||
document.body.removeChild(a);
|
||||
window.URL.revokeObjectURL(url);
|
||||
}, 0);
|
||||
});
|
||||
a.href = url;
|
||||
a.download = `${indexName}_treemap.png`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
setTimeout(function () {
|
||||
document.body.removeChild(a);
|
||||
window.URL.revokeObjectURL(url);
|
||||
}, 0);
|
||||
});
|
||||
}
|
||||
|
||||
export default {
|
||||
name: "D3Treemap",
|
||||
props: ["indexId"],
|
||||
watch: {
|
||||
indexId: function () {
|
||||
this.update(this.indexId);
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.update(this.indexId);
|
||||
},
|
||||
methods: {
|
||||
update(indexId) {
|
||||
const width = SIZES[this.$store.state.optTreemapSize][0];
|
||||
const height = SIZES[this.$store.state.optTreemapSize][1];
|
||||
const tilingMode = this.$store.state.optTreemapTiling;
|
||||
const groupingDepth = this.$store.state.optTreemapColorGroupingDepth;
|
||||
const treemapColor = this.$store.state.optTreemapColor;
|
||||
const treemapType = this.$store.state.optTreemapType;
|
||||
|
||||
const treemapSvg = d3.select("#treemap");
|
||||
|
||||
treemapSvg.selectAll("*").remove();
|
||||
treemapSvg.attr("viewBox", [0, 0, width, height])
|
||||
.attr("xmlns", "http://www.w3.org/2000/svg")
|
||||
.attr("xmlns:xlink", "http://www.w3.org/1999/xlink")
|
||||
.attr("version", "1.1")
|
||||
.style("overflow", "visible")
|
||||
.style("font", "10px sans-serif");
|
||||
|
||||
d3.json(Sist2Api.getTreemapStat(indexId)).then(tabularData => {
|
||||
tabularData.forEach(row => {
|
||||
row.taxonomy = row.path.split("/");
|
||||
row.size = Number(row.size);
|
||||
});
|
||||
|
||||
if (treemapType === "cascaded") {
|
||||
const data = burrow(tabularData, false);
|
||||
cascadeTreemap(data, treemapSvg, width, height, tilingMode, treemapColor);
|
||||
} else {
|
||||
const data = burrow(tabularData.sort((a, b) => b.taxonomy.length - a.taxonomy.length), true);
|
||||
const fillOpacity = this.$store.state.optTheme === "black" ? 0.9 : 0.6;
|
||||
flatTreemap(data, treemapSvg, width, height, groupingDepth, tilingMode, fillOpacity);
|
||||
name: "D3Treemap",
|
||||
props: ["indexId"],
|
||||
watch: {
|
||||
indexId: function () {
|
||||
this.update(this.indexId);
|
||||
}
|
||||
});
|
||||
},
|
||||
downloadTreemap() {
|
||||
const width = SIZES[this.$store.state.optTreemapSize][0];
|
||||
const height = SIZES[this.$store.state.optTreemapSize][1];
|
||||
mounted() {
|
||||
this.update(this.indexId);
|
||||
},
|
||||
methods: {
|
||||
update(indexId) {
|
||||
const width = SIZES[this.$store.state.optTreemapSize][0];
|
||||
const height = SIZES[this.$store.state.optTreemapSize][1];
|
||||
const tilingMode = this.$store.state.optTreemapTiling;
|
||||
const groupingDepth = this.$store.state.optTreemapColorGroupingDepth;
|
||||
const treemapColor = this.$store.state.optTreemapColor;
|
||||
const treemapType = this.$store.state.optTreemapType;
|
||||
|
||||
exportTreemap(this.indexId, width, height);
|
||||
const treemapSvg = d3.select("#treemap");
|
||||
|
||||
treemapSvg.selectAll("*").remove();
|
||||
treemapSvg.attr("viewBox", [0, 0, width, height])
|
||||
.attr("xmlns", "http://www.w3.org/2000/svg")
|
||||
.attr("xmlns:xlink", "http://www.w3.org/1999/xlink")
|
||||
.attr("version", "1.1")
|
||||
.style("overflow", "visible")
|
||||
.style("font", "10px sans-serif");
|
||||
|
||||
d3.json(Sist2Api.getTreemapStat(indexId)).then(tabularData => {
|
||||
tabularData.forEach(row => {
|
||||
row.taxonomy = row.path.split("/");
|
||||
row.size = Number(row.size);
|
||||
});
|
||||
|
||||
if (treemapType === "cascaded") {
|
||||
const data = burrow(tabularData, false);
|
||||
cascadeTreemap(data, treemapSvg, width, height, tilingMode, treemapColor);
|
||||
} else {
|
||||
const data = burrow(tabularData.sort((a, b) => b.taxonomy.length - a.taxonomy.length), true);
|
||||
const fillOpacity = this.$store.state.optTheme === "black" ? 0.9 : 0.6;
|
||||
flatTreemap(data, treemapSvg, width, height, groupingDepth, tilingMode, fillOpacity);
|
||||
}
|
||||
});
|
||||
},
|
||||
downloadTreemap() {
|
||||
const width = SIZES[this.$store.state.optTreemapSize][0];
|
||||
const height = SIZES[this.$store.state.optTreemapSize][1];
|
||||
|
||||
exportTreemap(this.indexId, width, height);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
@@ -57,7 +57,9 @@ export default {
                };
            })
        },
        indices: () => this.$store.state.indices
        indices() {
            return this.$store.state.indices;
        }
    }
}
</script>
@@ -83,6 +83,10 @@ void exec_args_destroy(exec_args_t *args) {
    free(args);
}

void sqlite_index_args_destroy(sqlite_index_args_t *args) {
    // TODO
}

int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
    if (argc < 2) {
        fprintf(stderr, "Required positional argument: PATH.\n");

@@ -134,5 +134,7 @@ void exec_args_destroy(exec_args_t *args);

int exec_args_validate(exec_args_t *args, int argc, const char **argv);

void sqlite_index_args_destroy(sqlite_index_args_t *args);

#endif
@@ -447,12 +447,16 @@ database_summary_stats_t database_fts_get_date_range(database_t *db) {
    return stats;
}

char *get_after_where(char **after, fts_sort_t sort) {
char *get_after_where(char **after, fts_sort_t sort, int sort_asc) {
    if (after == NULL) {
        return NULL;
    }

    return "(sort_var, doc.ROWID) > (?3, ?4)";
    if (sort_asc) {
        return "(sort_var, doc.ROWID) > (?3, ?4)";
    }

    return "(sort_var, doc.ROWID) < (?3, ?4)";
}

cJSON *database_fts_search(database_t *db, const char *query, const char *path, long size_min,

@@ -469,7 +473,7 @@ cJSON *database_fts_search(database_t *db, const char *query, const char *path,
    char *index_id_where = index_ids_where_clause(index_ids);
    char *mime_where = mime_types_where_clause(mime_types);
    const char *query_where = match_where(query);
    const char *after_where = get_after_where(after, sort);
    const char *after_where = get_after_where(after, sort, sort_asc);
    const char *tags_where = tags_where_clause(tags);

    if (!query_where && sort == FTS_SORT_SCORE) {
@@ -149,6 +149,7 @@ void database_generate_stats(database_t *db, double treemap_threshold) {

            merged_rows += 1;
        }
        free(iter);
    } while (merged_rows > TREEMAP_MINIMUM_MERGES_TO_CONTINUE);

    CRASH_IF_NOT_SQLITE_OK(sqlite3_exec(db->db,
@@ -68,9 +68,7 @@ void database_scan_begin(scan_args_t *args) {
    desc->version_patch = VersionPatch;

    // generate new index id based on timestamp
    unsigned char index_md5[MD5_DIGEST_LENGTH];
    MD5((unsigned char *) &ScanCtx.index.desc.timestamp, sizeof(ScanCtx.index.desc.timestamp), index_md5);
    buf2hex(index_md5, MD5_DIGEST_LENGTH, ScanCtx.index.desc.id);
    md5_hexdigest(&ScanCtx.index.desc.timestamp, sizeof(ScanCtx.index.desc.timestamp), ScanCtx.index.desc.id);

    database_initialize(db);
    database_open(db);

@@ -683,6 +681,7 @@ int main(int argc, const char *argv[]) {
    index_args_destroy(index_args);
    web_args_destroy(web_args);
    exec_args_destroy(exec_args);
    sqlite_index_args_destroy(sqlite_index_args);

    return 0;
}
@@ -2,15 +2,18 @@
#define SIST2_FS_UTIL_H

#include "src/sist.h"
#include <openssl/evp.h>

#define CLOSE_FILE(f) if ((f).close != NULL) {(f).close(&(f));};

static int fs_read(struct vfile *f, void *buf, size_t size) {
    if (f->fd == -1) {
        SHA1_Init(&f->sha1_ctx);
        f->sha1_ctx = EVP_MD_CTX_new();
        EVP_DigestInit_ex(f->sha1_ctx, EVP_sha1(), NULL);

        f->fd = open(f->filepath, O_RDONLY);
        if (f->fd == -1) {
            EVP_MD_CTX_free(f->sha1_ctx);
            return -1;
        }
    }

@@ -19,7 +22,7 @@ static int fs_read(struct vfile *f, void *buf, size_t size) {

    if (ret != 0 && f->calculate_checksum) {
        f->has_checksum = TRUE;
        safe_sha1_update(&f->sha1_ctx, (unsigned char *) buf, ret);
        safe_digest_update(f->sha1_ctx, (unsigned char *) buf, ret);
    }

    return ret;

@@ -27,8 +30,11 @@ static int fs_read(struct vfile *f, void *buf, size_t size) {

static void fs_close(struct vfile *f) {
    if (f->fd != -1) {
        SHA1_Final(f->sha1_digest, &f->sha1_ctx);
        EVP_DigestFinal_ex(f->sha1_ctx, f->sha1_digest, NULL);
        EVP_MD_CTX_free(f->sha1_ctx);
        f->sha1_ctx = NULL;
        close(f->fd);
        f->fd = -1;
    }
}
|
||||
} else if (IS_FONT(mime)) {
|
||||
return FILETYPE_FONT;
|
||||
} else if (ScanCtx.arc_ctx.mode != ARC_MODE_SKIP && (
|
||||
IS_ARC(mime) ||
|
||||
(IS_ARC_FILTER(mime) && should_parse_filtered_file(filepath))
|
||||
)) {
|
||||
IS_ARC(mime) ||
|
||||
(IS_ARC_FILTER(mime) && should_parse_filtered_file(filepath))
|
||||
)) {
|
||||
return FILETYPE_ARCHIVE;
|
||||
} else if ((ScanCtx.ooxml_ctx.content_size > 0 || ScanCtx.media_ctx.tn_size > 0) && IS_DOC(mime)) {
|
||||
return FILETYPE_OOXML;
|
||||
@@ -155,19 +155,17 @@ void parse(parse_job_t *job) {
|
||||
doc->meta_head = NULL;
|
||||
doc->meta_tail = NULL;
|
||||
doc->size = job->vfile.st_size;
|
||||
doc->mtime = job->vfile.mtime;
|
||||
doc->mtime = MAX(job->vfile.mtime, 0);
|
||||
doc->mime = get_mime(job);
|
||||
generate_doc_id(doc->filepath + ScanCtx.index.desc.root_len, doc->doc_id);
|
||||
|
||||
if (doc->mime == GET_MIME_ERROR_FATAL) {
|
||||
|
||||
CLOSE_FILE(job->vfile)
|
||||
free(doc);
|
||||
return;
|
||||
}
|
||||
|
||||
if (database_mark_document(ProcData.index_db, doc->doc_id, doc->mtime)) {
|
||||
|
||||
CLOSE_FILE(job->vfile)
|
||||
free(doc);
|
||||
return;
|
||||
|
||||
@@ -51,11 +51,11 @@
#include <ctype.h>
#include "git_hash.h"

#define VERSION "3.1.2"
#define VERSION "3.1.4"
static const char *const Version = VERSION;
static const int VersionMajor = 3;
static const int VersionMinor = 1;
static const int VersionPatch = 2;
static const int VersionPatch = 4;

#ifndef SIST_PLATFORM
#define SIST_PLATFORM unknown
src/util.h (18 lines changed)
@@ -7,6 +7,7 @@

#include "third-party/utf8.h/utf8.h"
#include "libscan/scan.h"
#include <openssl/evp.h>

char *abspath(const char *path);

@@ -86,13 +87,22 @@ static void buf2hex(const unsigned char *buf, size_t buflen, char *hex_string) {
    *s = '\0';
}

static void md5_hexdigest(void *data, size_t size, char *output) {
    EVP_MD_CTX *md_ctx = EVP_MD_CTX_new();
    EVP_DigestInit_ex(md_ctx, EVP_md5(), NULL);

    EVP_DigestUpdate(md_ctx, data, size);

    unsigned char digest[MD5_DIGEST_LENGTH];
    EVP_DigestFinal_ex(md_ctx, digest, NULL);
    EVP_MD_CTX_free(md_ctx);

    buf2hex(digest, MD5_DIGEST_LENGTH, output);
}

__always_inline
static void generate_doc_id(const char *rel_path, char *doc_id) {
    unsigned char md[MD5_DIGEST_LENGTH];

    MD5((unsigned char *) rel_path, strlen(rel_path), md);
    buf2hex(md, sizeof(md), doc_id);
    md5_hexdigest(rel_path, strlen(rel_path), doc_id);
}

#define MILLISECOND 1000
@@ -248,9 +248,11 @@ void serve_file_from_disk(cJSON *json, index_t *idx, struct mg_connection *nc, s

    char mime_mapping[8192];
    if (strlen(ext) == 0) {
        snprintf(mime_mapping, sizeof(mime_mapping), "%s=%s", full_path, mime);
        snprintf(mime_mapping, sizeof(mime_mapping), "%s=%s%s",
                 full_path, mime, STR_STARTS_WITH_CONSTANT(mime, "text/") ? "; charset=utf8" : "");
    } else {
        snprintf(mime_mapping, sizeof(mime_mapping), "%s=%s", ext, mime);
        snprintf(mime_mapping, sizeof(mime_mapping), "%s=%s%s",
                 ext, mime, STR_STARTS_WITH_CONSTANT(mime, "text/") ? "; charset=utf8" : "");
    }

    struct mg_http_serve_opts opts = {

@@ -526,9 +528,9 @@ void tag(struct mg_connection *nc, struct mg_http_message *hm) {
    }

    tag_req_t *req = parse_tag_request(json);
    cJSON_Delete(json);
    if (req == NULL) {
        LOG_DEBUGF("serve.c", "Could not parse tag request", arg_index);
        cJSON_Delete(json);
        HTTP_REPLY_BAD_REQUEST
        return;
    }

@@ -552,6 +554,7 @@ void tag(struct mg_connection *nc, struct mg_http_message *hm) {
    }

    free(req);
    cJSON_Delete(json);
}

int validate_auth(struct mg_connection *nc, struct mg_http_message *hm) {
third-party/libscan/libscan-test-files (vendored submodule): updated 9ac2cc1037...0167ada040
third-party/libscan/libscan/arc/arc.c (vendored, 13 lines changed)
@@ -22,7 +22,11 @@ int should_parse_filtered_file(const char *filepath) {
}

void arc_close(struct vfile *f) {
    SHA1_Final(f->sha1_digest, &f->sha1_ctx);
    if (f->sha1_ctx != NULL) {
        EVP_DigestFinal_ex(f->sha1_ctx, f->sha1_digest, NULL);
        EVP_MD_CTX_free(f->sha1_ctx);
        f->sha1_ctx = NULL;
    }

    if (f->rewind_buffer != NULL) {
        free(f->rewind_buffer);

@@ -59,7 +63,7 @@ int arc_read(struct vfile *f, void *buf, size_t size) {
    if (bytes_read != 0 && bytes_read <= size && f->calculate_checksum) {
        f->has_checksum = TRUE;

        safe_sha1_update(&f->sha1_ctx, (unsigned char *) buf, bytes_read);
        safe_digest_update(f->sha1_ctx, (unsigned char *) buf, bytes_read);
    }

    if (bytes_read != size && archive_errno(f->arc) != 0) {

@@ -237,9 +241,12 @@ scan_code_t parse_archive(scan_arc_ctx_t *ctx, vfile_t *f, document_t *doc, pcre
        sub_job->ext = (int) strlen(sub_job->filepath);
    }

    SHA1_Init(&sub_job->vfile.sha1_ctx);
    sub_job->vfile.sha1_ctx = EVP_MD_CTX_new();
    EVP_DigestInit(sub_job->vfile.sha1_ctx, EVP_sha1());

    ctx->parse(sub_job);

    sub_job->vfile.close(&sub_job->vfile);
    }
}
third-party/libscan/libscan/arc/arc.h (vendored, 9 lines changed)
@@ -35,7 +35,8 @@ static int vfile_open_callback(struct archive *a, void *user_data) {
    arc_data_t *data = (arc_data_t *) user_data;

    if (!data->f->is_fs_file) {
        SHA1_Init(&data->f->sha1_ctx);
        data->f->sha1_ctx = EVP_MD_CTX_new();
        EVP_DigestInit(data->f->sha1_ctx, EVP_md5());
    }

    return ARCHIVE_OK;

@@ -49,7 +50,7 @@ static long vfile_read_callback(struct archive *a, void *user_data, const void *

    if (!data->f->is_fs_file && ret > 0) {
        data->f->has_checksum = TRUE;
        safe_sha1_update(&data->f->sha1_ctx, (unsigned char*)data->buf, ret);
        safe_digest_update(data->f->sha1_ctx, (unsigned char *) data->buf, ret);
    }

    return ret;

@@ -59,7 +60,9 @@ static int vfile_close_callback(struct archive *a, void *user_data) {
    arc_data_t *data = (arc_data_t *) user_data;

    if (!data->f->is_fs_file) {
        SHA1_Final((unsigned char *) data->f->sha1_digest, &data->f->sha1_ctx);
        EVP_DigestFinal_ex(data->f->sha1_ctx, data->f->sha1_digest, NULL);
        EVP_MD_CTX_free(data->f->sha1_ctx);
        data->f->sha1_ctx = NULL;
    }

    return ARCHIVE_OK;
third-party/libscan/libscan/ebook/ebook.c (vendored, 153 lines changed)
@@ -11,8 +11,6 @@
|
||||
pthread_mutex_t Mutex;
|
||||
#endif
|
||||
|
||||
/* fill_image callback doesn't let us pass opaque pointers unless I create my own device */
|
||||
__thread text_buffer_t thread_buffer;
|
||||
__thread scan_ebook_ctx_t thread_ctx;
|
||||
|
||||
static void my_fz_lock(UNUSED(void *user), int lock) {
|
||||
@@ -232,21 +230,47 @@ static int read_stext_block(fz_stext_block *block, text_buffer_t *tex) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void fill_image_ocr_cb(const char* text, size_t len) {
|
||||
text_buffer_append_string(&thread_buffer, text, len - 1);
|
||||
int read_stext(text_buffer_t *tex, fz_stext_page *stext) {
|
||||
|
||||
int count = 0;
|
||||
|
||||
fz_stext_block *block = stext->first_block;
|
||||
|
||||
while (block != NULL) {
|
||||
int ret = read_stext_block(block, tex);
|
||||
count += 1;
|
||||
if (ret == TEXT_BUF_FULL) {
|
||||
break;
|
||||
}
|
||||
block = block->next;
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
void fill_image(fz_context *fzctx, UNUSED(fz_device *dev),
|
||||
fz_image *img, UNUSED(fz_matrix ctm), UNUSED(float alpha),
|
||||
UNUSED(fz_color_params color_params)) {
|
||||
int load_page(fz_context *fzctx, fz_document *fzdoc, int current_page, fz_page **page) {
|
||||
int err = 0;
|
||||
|
||||
int l2factor = 0;
|
||||
fz_var(err);
|
||||
fz_try(fzctx)(*page) = fz_load_page(fzctx, fzdoc, current_page);
|
||||
fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
|
||||
if (img->w >= MIN_OCR_WIDTH && img->h >= MIN_OCR_HEIGHT && OCR_IS_VALID_BPP(img->n)) {
|
||||
fz_pixmap *pix = img->get_pixmap(fzctx, img, NULL, img->w, img->h, &l2factor);
|
||||
ocr_extract_text(thread_ctx.tesseract_path, thread_ctx.tesseract_lang, pix->samples, pix->w, pix->h, pix->n, (int)pix->stride, pix->xres, fill_image_ocr_cb);
|
||||
fz_drop_pixmap(fzctx, pix);
|
||||
}
|
||||
return err;
|
||||
}
|
||||
|
||||
fz_device *new_stext_dev(fz_context *fzctx, fz_stext_page *stext) {
|
||||
fz_stext_options opts = {
|
||||
.flags = FZ_STEXT_DEHYPHENATE,
|
||||
.scale = 0
|
||||
};
|
||||
|
||||
fz_device *stext_dev = fz_new_stext_device(fzctx, stext, &opts);
|
||||
stext_dev->stroke_path = NULL;
|
||||
stext_dev->stroke_text = NULL;
|
||||
stext_dev->clip_text = NULL;
|
||||
stext_dev->clip_stroke_path = NULL;
|
||||
stext_dev->clip_stroke_text = NULL;
|
||||
return stext_dev;
|
||||
}
|
||||
|
||||
void
|
||||
@@ -326,46 +350,37 @@ parse_ebook_mem(scan_ebook_ctx_t *ctx, void *buf, size_t buf_len, const char *mi
|
||||
|
||||
|
||||
if (ctx->content_size > 0) {
|
||||
fz_stext_options opts = {0};
|
||||
thread_buffer = text_buffer_create(ctx->content_size);
|
||||
text_buffer_t tex = text_buffer_create(ctx->content_size);
|
||||
|
||||
for (int current_page = 0; current_page < page_count; current_page++) {
|
||||
fz_page *page = NULL;
|
||||
fz_var(err);
|
||||
fz_try(fzctx)page = fz_load_page(fzctx, fzdoc, current_page);
|
||||
fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
err = load_page(fzctx, fzdoc, current_page, &page);
|
||||
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_load_page() returned error code [%d] %s", err, fzctx->error.message);
|
||||
text_buffer_destroy(&thread_buffer);
|
||||
CTX_LOG_WARNINGF(doc->filepath,
|
||||
"fz_load_page() returned error code [%d] %s", err, fzctx->error.message);
|
||||
text_buffer_destroy(&tex);
|
||||
fz_drop_page(fzctx, page);
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
fz_rect page_mediabox = fz_bound_page(fzctx, page);
|
||||
|
||||
fz_stext_page *stext = fz_new_stext_page(fzctx, fz_bound_page(fzctx, page));
|
||||
fz_device *dev = fz_new_stext_device(fzctx, stext, &opts);
|
||||
dev->stroke_path = NULL;
|
||||
dev->stroke_text = NULL;
|
||||
dev->clip_text = NULL;
|
||||
dev->clip_stroke_path = NULL;
|
||||
dev->clip_stroke_text = NULL;
|
||||
|
||||
if (ctx->tesseract_lang != NULL) {
|
||||
dev->fill_image = fill_image;
|
||||
}
|
||||
fz_stext_page *stext = fz_new_stext_page(fzctx, page_mediabox);
|
||||
fz_device *stext_dev = new_stext_dev(fzctx, stext);
|
||||
|
||||
fz_var(err);
|
||||
fz_try(fzctx)fz_run_page(fzctx, page, dev, fz_identity, NULL);
|
||||
fz_try(fzctx)fz_run_page(fzctx, page, stext_dev, fz_identity, NULL);
|
||||
fz_always(fzctx) {
|
||||
fz_close_device(fzctx, dev);
|
||||
fz_drop_device(fzctx, dev);
|
||||
} fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
fz_close_device(fzctx, stext_dev);
|
||||
fz_drop_device(fzctx, stext_dev);
|
||||
} fz_catch(fzctx) err = fzctx->error.errcode;
|
||||
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_run_page() returned error code [%d] %s", err, fzctx->error.message);
|
||||
text_buffer_destroy(&thread_buffer);
|
||||
text_buffer_destroy(&tex);
|
||||
fz_drop_page(fzctx, page);
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
fz_drop_stream(fzctx, stream);
|
||||
@@ -374,29 +389,63 @@ parse_ebook_mem(scan_ebook_ctx_t *ctx, void *buf, size_t buf_len, const char *mi
|
||||
return;
|
||||
}
|
||||
|
||||
fz_stext_block *block = stext->first_block;
|
||||
while (block != NULL) {
|
||||
int ret = read_stext_block(block, &thread_buffer);
|
||||
if (ret == TEXT_BUF_FULL) {
|
||||
break;
|
||||
}
|
||||
block = block->next;
|
||||
}
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
fz_drop_page(fzctx, page);
|
||||
int num_blocks_read = read_stext(&tex, stext);
|
||||
|
||||
if (thread_buffer.dyn_buffer.cur >= ctx->content_size) {
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
|
||||
if (tex.dyn_buffer.cur >= ctx->content_size) {
|
||||
fz_drop_page(fzctx, page);
|
||||
break;
|
||||
}
|
||||
}
|
||||
text_buffer_terminate_string(&thread_buffer);
|
||||
|
||||
meta_line_t *meta_content = malloc(sizeof(meta_line_t) + thread_buffer.dyn_buffer.cur);
|
||||
// If OCR is enabled and no text is found on the page
|
||||
if (ctx->tesseract_lang != NULL && num_blocks_read == 0) {
|
||||
stext = fz_new_stext_page(fzctx, page_mediabox);
|
||||
stext_dev = new_stext_dev(fzctx, stext);
|
||||
|
||||
fz_device *ocr_dev = fz_new_ocr_device(fzctx, stext_dev, fz_identity,
|
||||
page_mediabox, TRUE,
|
||||
ctx->tesseract_lang,
|
||||
ctx->tesseract_path,
|
||||
NULL, NULL);
|
||||
|
||||
fz_var(err);
|
||||
fz_try(fzctx)fz_run_page(fzctx, page, ocr_dev, fz_identity, NULL);
|
||||
fz_always(fzctx) {
|
||||
fz_close_device(fzctx, ocr_dev);
|
||||
fz_drop_device(fzctx, ocr_dev);
|
||||
} fz_catch(fzctx) err = fzctx->error.errcode;
|
||||
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_run_page() returned error code [%d] %s", err, fzctx->error.message);
|
||||
fz_close_device(fzctx, stext_dev);
|
||||
fz_drop_device(fzctx, stext_dev);
|
||||
text_buffer_destroy(&tex);
|
||||
fz_drop_page(fzctx, page);
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
|
||||
fz_close_device(fzctx, stext_dev);
|
||||
fz_drop_device(fzctx, stext_dev);
|
||||
|
||||
read_stext(&tex, stext);
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
}
|
||||
|
||||
fz_drop_page(fzctx, page);
|
||||
}
|
||||
text_buffer_terminate_string(&tex);
|
||||
|
||||
meta_line_t *meta_content = malloc(sizeof(meta_line_t) + tex.dyn_buffer.cur);
|
||||
meta_content->key = MetaContent;
|
||||
memcpy(meta_content->str_val, thread_buffer.dyn_buffer.buf, thread_buffer.dyn_buffer.cur);
|
||||
memcpy(meta_content->str_val, tex.dyn_buffer.buf, tex.dyn_buffer.cur);
|
||||
APPEND_META(doc, meta_content);
|
||||
|
||||
text_buffer_destroy(&thread_buffer);
|
||||
text_buffer_destroy(&tex);
|
||||
}
|
||||
|
||||
fz_drop_stream(fzctx, stream);
|
||||
|
||||
third-party/libscan/libscan/media/media.c (vendored, 48 lines changed)
@@ -118,13 +118,12 @@ static void read_subtitles(scan_media_ctx_t *ctx, AVFormatContext *pFormatCtx, i
AVPacket packet;
AVSubtitle subtitle;

AVCodec *subtitle_codec = avcodec_find_decoder(pFormatCtx->streams[stream_idx]->codecpar->codec_id);
const AVCodec *subtitle_codec = avcodec_find_decoder(pFormatCtx->streams[stream_idx]->codecpar->codec_id);
AVCodecContext *decoder = avcodec_alloc_context3(subtitle_codec);
decoder->thread_count = 1;
avcodec_parameters_to_context(decoder, pFormatCtx->streams[stream_idx]->codecpar);
avcodec_open2(decoder, subtitle_codec, NULL);

decoder->sub_text_format = FF_SUB_TEXT_FMT_ASS;

int got_sub;

while (1) {
@@ -177,8 +176,6 @@ read_frame(scan_media_ctx_t *ctx, AVFormatContext *pFormatCtx, AVCodecContext *d
result->packet = av_packet_alloc();
result->frame = av_frame_alloc();

av_init_packet(result->packet);

int receive_ret = -EAGAIN;
while (receive_ret == -EAGAIN) {
// Get video frame
@@ -477,13 +474,12 @@ int decode_frame_and_save_thumbnail(scan_media_ctx_t *ctx, AVFormatContext *pFor
avcodec_send_frame(thumbnail_encoder, scaled_frame);
avcodec_send_frame(thumbnail_encoder, NULL); // send EOF

AVPacket thumbnail_packet;
av_init_packet(&thumbnail_packet);
avcodec_receive_packet(thumbnail_encoder, &thumbnail_packet);
AVPacket *thumbnail_packet = av_packet_alloc();
avcodec_receive_packet(thumbnail_encoder, thumbnail_packet);

// Save thumbnail
if (thumbnail_index == 0) {
ctx->store(doc->doc_id, 0, thumbnail_packet.data, thumbnail_packet.size);
ctx->store(doc->doc_id, 0, thumbnail_packet->data, thumbnail_packet->size);
return_value = SAVE_THUMBNAIL_OK;

} else if (thumbnail_index > 1) {
@@ -491,7 +487,7 @@ int decode_frame_and_save_thumbnail(scan_media_ctx_t *ctx, AVFormatContext *pFor
// I figure out a better fix.
thumbnail_index -= 1;

ctx->store(doc->doc_id, thumbnail_index, thumbnail_packet.data, thumbnail_packet.size);
ctx->store(doc->doc_id, thumbnail_index, thumbnail_packet->data, thumbnail_packet->size);

return_value = SAVE_THUMBNAIL_OK;
} else {
@@ -499,7 +495,7 @@ int decode_frame_and_save_thumbnail(scan_media_ctx_t *ctx, AVFormatContext *pFor
}

avcodec_free_context(&thumbnail_encoder);
av_packet_unref(&thumbnail_packet);
av_packet_free(&thumbnail_packet);
av_free(*scaled_frame->data);
av_frame_free(&scaled_frame);
}
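The read_frame() and thumbnail hunks above drop av_init_packet() and the stack-allocated AVPacket in favour of heap packets, the pattern required since FFmpeg 5.0. Below is a minimal sketch of that lifecycle under that assumption; the MJPEG encoder is only a stand-in for the project's thumbnail encoder and no frame is actually encoded.

```c
// Minimal sketch of the FFmpeg >= 5.0 packet lifecycle used above.
#include <libavcodec/avcodec.h>
#include <stdio.h>

int main(void) {
    const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
    AVCodecContext *enc = avcodec_alloc_context3(codec);
    enc->width = 64;
    enc->height = 64;
    enc->pix_fmt = AV_PIX_FMT_YUVJ420P;
    enc->time_base = (AVRational) {1, 25};
    if (avcodec_open2(enc, codec, NULL) < 0) {
        return 1;
    }

    // Heap-allocated packet replaces "AVPacket pkt; av_init_packet(&pkt);"
    AVPacket *pkt = av_packet_alloc();

    // No frame was sent, so this returns AVERROR(EAGAIN); with a real
    // encoded frame it would fill pkt->data / pkt->size as in media.c.
    int ret = avcodec_receive_packet(enc, pkt);
    printf("avcodec_receive_packet: %d\n", ret);

    av_packet_free(&pkt);      // frees the packet and unrefs any data it owns
    avcodec_free_context(&enc);
    return 0;
}
```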
@@ -578,8 +574,9 @@ void parse_media_format_ctx(scan_media_ctx_t *ctx, AVFormatContext *pFormatCtx,
}

// Decoder
AVCodec *video_codec = avcodec_find_decoder(stream->codecpar->codec_id);
const AVCodec *video_codec = avcodec_find_decoder(stream->codecpar->codec_id);
AVCodecContext *decoder = avcodec_alloc_context3(video_codec);
decoder->thread_count = 1;
avcodec_parameters_to_context(decoder, stream->codecpar);
avcodec_open2(decoder, video_codec, NULL);
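The parse_media_format_ctx() hunk above switches the decoder lookup to the const-correct FFmpeg 5 API and pins thread_count to 1. Below is a minimal sketch of the same lookup/open sequence, assuming FFmpeg >= 5.0; "sample.mp4" is a placeholder input path, not a file from the project.

```c
// Minimal sketch of decoder setup with the FFmpeg 5 "const AVCodec *" API.
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>

int main(void) {
    AVFormatContext *fmt = NULL;
    if (avformat_open_input(&fmt, "sample.mp4", NULL, NULL) < 0) {
        return 1;
    }
    avformat_find_stream_info(fmt, NULL);

    int stream_idx = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (stream_idx < 0) {
        avformat_close_input(&fmt);
        return 1;
    }

    // Since FFmpeg 5.0, avcodec_find_decoder() returns a const pointer.
    const AVCodec *codec = avcodec_find_decoder(fmt->streams[stream_idx]->codecpar->codec_id);
    AVCodecContext *decoder = avcodec_alloc_context3(codec);
    decoder->thread_count = 1; // single-threaded decode, as in media.c
    avcodec_parameters_to_context(decoder, fmt->streams[stream_idx]->codecpar);
    avcodec_open2(decoder, codec, NULL);

    // ... av_read_frame() / avcodec_send_packet() / avcodec_receive_frame() ...

    avcodec_free_context(&decoder);
    avformat_close_input(&fmt);
    return 0;
}
```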
@@ -630,6 +627,9 @@ void parse_media_filename(scan_media_ctx_t *ctx, const char *filepath, document_
CTX_LOG_ERROR(doc->filepath, "(media.c) Could not allocate context with avformat_alloc_context()");
return;
}
pFormatCtx->max_analyze_duration = 100000000;
pFormatCtx->probesize = 100000000;

int res = avformat_open_input(&pFormatCtx, filepath, NULL, NULL);
if (res < 0) {
CTX_LOG_ERRORF(doc->filepath, "(media.c) avformat_open_input() returned [%d] %s", res, av_err2str(res));
@@ -697,9 +697,10 @@ int memfile_open(vfile_t *f, memfile_t *mem) {
mem->file = fmemopen(mem->buf, mem->size, "rb");

if (f->calculate_checksum) {
SHA1_Init(&f->sha1_ctx);
safe_sha1_update(&f->sha1_ctx, mem->buf, mem->size);
SHA1_Final(f->sha1_digest, &f->sha1_ctx);
safe_digest_update(f->sha1_ctx, mem->buf, mem->size);
EVP_DigestFinal_ex(f->sha1_ctx, f->sha1_digest, NULL);
EVP_MD_CTX_free(f->sha1_ctx);
f->sha1_ctx = NULL;
f->has_checksum = TRUE;
}

@@ -729,6 +730,9 @@ void parse_media_vfile(scan_media_ctx_t *ctx, struct vfile *f, document_t *doc,
CTX_LOG_ERROR(doc->filepath, "(media.c) Could not allocate context with avformat_alloc_context()");
return;
}
pFormatCtx->max_analyze_duration = 100000000;
pFormatCtx->probesize = 100000000;

unsigned char *buffer = (unsigned char *) av_malloc(AVIO_BUF_SIZE);
AVIOContext *io_ctx = NULL;
@@ -792,6 +796,8 @@ int store_image_thumbnail(scan_media_ctx_t *ctx, void *buf, size_t buf_len, docu
CTX_LOG_ERROR(doc->filepath, "(media.c) Could not allocate context with avformat_alloc_context()");
return FALSE;
}
pFormatCtx->max_analyze_duration = 100000000;
pFormatCtx->probesize = 100000000;

unsigned char *buffer = (unsigned char *) av_malloc(AVIO_BUF_SIZE);

@@ -823,6 +829,7 @@ int store_image_thumbnail(scan_media_ctx_t *ctx, void *buf, size_t buf_len, docu
// Decoder
const AVCodec *video_codec = avcodec_find_decoder(stream->codecpar->codec_id);
AVCodecContext *decoder = avcodec_alloc_context3(video_codec);
decoder->thread_count = 1;
avcodec_parameters_to_context(decoder, stream->codecpar);
avcodec_open2(decoder, video_codec, NULL);

@@ -861,15 +868,14 @@ int store_image_thumbnail(scan_media_ctx_t *ctx, void *buf, size_t buf_len, docu
avcodec_send_frame(jpeg_encoder, scaled_frame);
avcodec_send_frame(jpeg_encoder, NULL); // Send EOF

AVPacket jpeg_packet;
av_init_packet(&jpeg_packet);
avcodec_receive_packet(jpeg_encoder, &jpeg_packet);
AVPacket *jpeg_packet = av_packet_alloc();
avcodec_receive_packet(jpeg_encoder, jpeg_packet);

// Save thumbnail
APPEND_LONG_META(doc, MetaThumbnail, 1);
ctx->store(doc->doc_id, 0, jpeg_packet.data, jpeg_packet.size);
ctx->store(doc->doc_id, 0, jpeg_packet->data, jpeg_packet->size);

av_packet_unref(&jpeg_packet);
av_packet_free(&jpeg_packet);
avcodec_free_context(&jpeg_encoder);
av_free(*scaled_frame->data);
av_frame_free(&scaled_frame);
@@ -886,4 +892,4 @@ int store_image_thumbnail(scan_media_ctx_t *ctx, void *buf, size_t buf_len, docu
fclose(memfile.file);

return TRUE;
}
}
3
third-party/libscan/libscan/scan.h
vendored
@@ -8,6 +8,7 @@
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>
#include <openssl/evp.h>
#include <openssl/md5.h>
#include <openssl/sha.h>

@@ -146,7 +147,7 @@ typedef struct vfile {
int mtime;
size_t st_size;

SHA_CTX sha1_ctx;
EVP_MD_CTX *sha1_ctx;
unsigned char sha1_digest[SHA1_DIGEST_LENGTH];

void *rewind_buffer;
5
third-party/libscan/libscan/util.h
vendored
@@ -6,6 +6,7 @@
#include "string.h"
#include "../third-party/utf8.h/utf8.h"
#include "macros.h"
#include <openssl/evp.h>

#define STR_STARTS_WITH_CONSTANT(x, y) (strncmp(y, x, sizeof(y) - 1) == 0)

@@ -339,7 +340,7 @@ static void *read_all(vfile_t *f, size_t *size) {
#define STACK_BUFFER_SIZE (size_t)(4096 * 8)

__always_inline
static void safe_sha1_update(SHA_CTX *ctx, void *buf, size_t size) {
static void safe_digest_update(EVP_MD_CTX *ctx, void *buf, size_t size) {
unsigned char stack_buf[STACK_BUFFER_SIZE];

void *sha1_buf;
@@ -351,7 +352,7 @@ static void safe_sha1_update(SHA_CTX *ctx, void *buf, size_t size) {
}

memcpy(sha1_buf, buf, size);
SHA1_Update(ctx, (const void *) sha1_buf, size);
EVP_DigestUpdate(ctx, sha1_buf, size);

if (sha1_buf != stack_buf) {
free(sha1_buf);
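The scan.h and util.h hunks above migrate checksumming from OpenSSL's legacy SHA1_* functions to the EVP digest interface, which is the API that remains supported in OpenSSL 3.x. Below is a minimal self-contained sketch of that incremental-hash pattern, assuming OpenSSL >= 1.1.0; it hashes a literal string rather than file contents.

```c
// Minimal sketch of the EVP digest pattern that replaces SHA1_Init/Update/Final.
#include <openssl/evp.h>
#include <stdio.h>
#include <string.h>

int main(void) {
    unsigned char digest[EVP_MAX_MD_SIZE];
    unsigned int digest_len = 0;
    const char *data = "hello";

    EVP_MD_CTX *ctx = EVP_MD_CTX_new();           // replaces SHA_CTX on the stack
    EVP_DigestInit_ex(ctx, EVP_sha1(), NULL);     // replaces SHA1_Init()
    EVP_DigestUpdate(ctx, data, strlen(data));    // replaces SHA1_Update()
    EVP_DigestFinal_ex(ctx, digest, &digest_len); // replaces SHA1_Final()
    EVP_MD_CTX_free(ctx);

    for (unsigned int i = 0; i < digest_len; i++) {
        printf("%02x", digest[i]);
    }
    printf("\n");
    return 0;
}
```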