Mirror of https://github.com/simon987/sist2.git (synced 2025-12-12 15:08:53 +00:00)

Compare commits (91 commits)
SHA1:
2882741926 edba9b7917 e89964d592 329afcbe4f 2a2664a5cd 0d18637e88 8ad9fc9e32
f075b542fe 3d4331b27d a0db49e7d8 065146ff8a d58fcbc788 b483447b1c 0d68d5fc7f
1813bf505c 9a6e7c7c47 68252b4e80 d1f13f2c84 6075c21a3a f3674ffa02 de187eff1c
8e96174e1f 8fa34da02f 37919932de 8ab8124370 bfd080943d c6820b6cc6 3c09c45694
bb5c17ec78 501064da10 8f7edf3190 e65905a165 2cb57f3634 679e12f786 291d307689
7d40b9e959 cf56bdfb74 b799a2e976 727b57b78a 61cb845a0e dad14fb66d c98a09d264
b978132ee0 4dedd281f1 65c499e477 625f3d0d6e 64b8aab8bf ad95684771 b37e5a4ad4
15ae2190cf 255bc2d689 fe1aa6dd4c cd2a44e016 ed2a3f342a 1107fe9a53 a96e65d039
87936eecd4 d817a0e9dd 94a5e0ac59 d40f5052f9 ee9a8fa514 81008d8936 52466d5d8a
5f73fc024b f2fd7ccf41 d87fee8e00 672d1344d7 27e32db1ed bb91139ffb 70cfa8c37c
7493dedc8c c786a31bb2 48d024e751 08b2ca9d43 ed8b4f4fad 66de93a8bd e3f78fb693
030643cee0 b17b9439df 414f65346c be8eedc9c7 5b62fe77f2 61ab68ce15 82ecb8bb85
a41b5dcc1f 06f21d5f0f e82a388d1e bf02e571b3 750a392a61 3d7b977a82 cd71551a22
@@ -28,3 +28,4 @@ sist2
**/ext_libwpd
**/core
*.a
tmp_scan/
.drone.yml

@@ -10,22 +10,7 @@ steps:
- name: build
image: simon987/sist2-build
commands:
- ./ci/build.sh
- name: docker
image: plugins/docker
settings:
username:
from_secret: DOCKER_USER
password:
from_secret: DOCKER_PASSWORD
repo: simon987/sist2
context: ./
dockerfile: ./Dockerfile
auto_tag: true
auto_tag_suffix: x64-linux
when:
event:
- tag
- ./scripts/build.sh
- name: scp files
image: appleboy/drone-scp
settings:
@@ -42,6 +27,21 @@ steps:
- ./VERSION
- ./sist2-x64-linux
- ./sist2-x64-linux-debug
- name: docker
image: plugins/docker
settings:
username:
from_secret: DOCKER_USER
password:
from_secret: DOCKER_PASSWORD
repo: simon987/sist2
context: ./
dockerfile: ./Dockerfile
auto_tag: true
auto_tag_suffix: x64-linux
when:
event:
- tag

---
kind: pipeline
@@ -55,7 +55,7 @@ steps:
- name: build
image: simon987/sist2-build-arm64
commands:
- ./ci/build_arm64.sh
- ./scripts/build_arm64.sh
- name: scp files
image: appleboy/drone-scp
settings:
.gitignore (vendored)

@@ -10,13 +10,13 @@ Makefile
LOG
sist2*
!sist2-vue/
index.sist2/
*.sist2/
bundle*.css
bundle.js
*.a
vgcore.*
build/
third-party/
third-party/argparse
*.idx/
VERSION
git_hash.h
@@ -24,3 +24,5 @@ Testing/
test_i
test_i_inc
node_modules/
.cmake/
i_inc/
.gitmodules (vendored)

@@ -1,6 +1,9 @@
[submodule "third-party/libscan"]
path = third-party/libscan
url = https://github.com/simon987/libscan
[submodule "third-party/argparse"]
path = third-party/argparse
url = https://github.com/cofyc/argparse
url = https://github.com/simon987/argparse
[submodule "third-party/libscan/third-party/utf8.h"]
path = third-party/libscan/third-party/utf8.h
url = https://github.com/sheredom/utf8.h
[submodule "third-party/libscan/third-party/antiword"]
path = third-party/libscan/third-party/antiword
url = https://github.com/simon987/antiword
CMakeLists.txt

@@ -21,10 +21,6 @@ set(ARGPARSE_SHARED off)
add_subdirectory(third-party/argparse)

add_executable(sist2

# argparse
third-party/argparse/argparse.h third-party/argparse/argparse.c

src/main.c
src/sist.h
src/io/walk.h src/io/walk.c
@@ -41,7 +37,11 @@ add_executable(sist2
src/log.c src/log.h
src/cli.c src/cli.h
src/stats.c src/stats.h src/ctx.c
src/parsing/sidecar.c src/parsing/sidecar.h)
src/parsing/sidecar.c src/parsing/sidecar.h

# argparse
third-party/argparse/argparse.h third-party/argparse/argparse.c
)

target_link_directories(sist2 PRIVATE BEFORE ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib/)
set(CMAKE_FIND_LIBRARY_SUFFIXES .a .lib)
@@ -86,6 +86,7 @@ if (SIST_DEBUG)
sist2
PRIVATE
-fsanitize=address
-static-libasan
)
set_target_properties(
sist2
Dockerfile

@@ -5,13 +5,11 @@ WORKDIR /build/
COPY . .
RUN cmake -DSIST_PLATFORM=x64_linux -DSIST_DEBUG=off -DBUILD_TESTS=off -DCMAKE_TOOLCHAIN_FILE=/vcpkg/scripts/buildsystems/vcpkg.cmake .
RUN make -j$(nproc)
RUN strip sist2
RUN ls -lh
RUN ls -lh sist2-vue/dist/
RUN strip sist2 || mv sist2_debug sist2

FROM ubuntu:20.10
FROM --platform="linux/amd64" ubuntu:21.10

RUN apt update && apt install -y curl libasan5
RUN apt update && apt install -y curl libasan5 && rm -rf /var/lib/apt/lists/*

RUN mkdir -p /usr/share/tessdata && \
cd /usr/share/tessdata/ && \
@@ -22,9 +20,9 @@ RUN mkdir -p /usr/share/tessdata && \
curl -o /usr/share/tessdata/rus.traineddata https://raw.githubusercontent.com/tesseract-ocr/tessdata/master/rus.traineddata &&\
curl -o /usr/share/tessdata/spa.traineddata https://raw.githubusercontent.com/tesseract-ocr/tessdata/master/spa.traineddata

COPY --from=build /build/sist2 /root/sist2
ENTRYPOINT ["/root/sist2"]

ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8

ENTRYPOINT ["/root/sist2"]
COPY --from=build /build/sist2 /root/sist2
@@ -7,9 +7,9 @@ RUN cmake -DSIST_PLATFORM=arm64_linux -DSIST_DEBUG=off -DBUILD_TESTS=off -DCMAKE
RUN make -j$(nproc)
RUN strip sist2

FROM ubuntu:20.10
FROM --platform="linux/arm64/v8" ubuntu:21.10

RUN apt update && apt install -y curl libasan5
RUN apt update && apt install -y curl libasan5 && rm -rf /var/lib/apt/lists/*

RUN mkdir -p /usr/share/tessdata && \
cd /usr/share/tessdata/ && \
@@ -20,9 +20,9 @@ RUN mkdir -p /usr/share/tessdata && \
curl -o /usr/share/tessdata/rus.traineddata https://raw.githubusercontent.com/tesseract-ocr/tessdata/master/rus.traineddata &&\
curl -o /usr/share/tessdata/spa.traineddata https://raw.githubusercontent.com/tesseract-ocr/tessdata/master/spa.traineddata

COPY --from=build /build/sist2 /root/sist2

ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8

ENTRYPOINT ["/root/sist2"]

COPY --from=build /build/sist2 /root/sist2
README.md

@@ -2,7 +2,7 @@
[](https://www.codefactor.io/repository/github/simon987/sist2)
[](https://files.simon987.net/.gate/sist2/simon987_sist2/)

**Demo**: [sist2.simon987.net](https://sist2.simon987.net/?i=Demo%20files)
**Demo**: [sist2.simon987.net](https://sist2.simon987.net/)

# sist2

@@ -10,7 +10,7 @@ sist2 (Simple incremental search tool)

*Warning: sist2 is in early development*

## Features

@@ -33,12 +33,11 @@ sist2 (Simple incremental search tool)

## Getting Started

1. Have an Elasticsearch (>= 6.X.X) instance running
1. Have an Elasticsearch (>= 6.8.X, ideally >=7.14.0) instance running
1. Download [from official website](https://www.elastic.co/downloads/elasticsearch)
1. *(or)* Run using docker:
```bash
docker run -d --name es1 --net sist2_net -p 9200:9200 \
-e "discovery.type=single-node" elasticsearch:7.14.0
docker run -d -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.14.0
```
1. *(or)* Run using docker-compose:
```yaml
@@ -49,10 +48,11 @@ sist2 (Simple incremental search tool)
- "ES_JAVA_OPTS=-Xms1G -Xmx2G"
```
1. Download sist2 executable
1. Download the [latest sist2 release](https://github.com/simon987/sist2/releases) *
1. *(or)* Download a [development snapshot](https://files.simon987.net/.gate/sist2/simon987_sist2/) *(Not
1. Download the [latest sist2 release](https://github.com/simon987/sist2/releases).
Select the file corresponding to your CPU architecture and mark the binary as executable with `chmod +x` *
2. *(or)* Download a [development snapshot](https://files.simon987.net/.gate/sist2/simon987_sist2/) *(Not
recommended!)*
1. *(or)* `docker pull simon987/sist2:2.11.2-x64-linux`
3. *(or)* `docker pull simon987/sist2:2.11.7-x64-linux`

1. See [Usage guide](docs/USAGE.md)

@@ -68,23 +68,23 @@ See [Usage guide](docs/USAGE.md) for more details

## Format support

File type | Library | Content | Thumbnail | Metadata
:---|:---|:---|:---|:---
pdf,xps,fb2,epub | MuPDF | text+ocr | yes | author, title |
cbz,cbr | [libscan](https://github.com/simon987/libscan) | - | yes | - |
`audio/*` | ffmpeg | - | yes | ID3 tags |
`video/*` | ffmpeg | - | yes | title, comment, artist |
`image/*` | ffmpeg | - | yes | [Common EXIF tags](https://github.com/simon987/sist2/blob/efdde2734eca9b14a54f84568863b7ffd59bdba3/src/parsing/media.c#L190), GPS tags |
raw, rw2, dng, cr2, crw, dcr, k25, kdc, mrw, pef, xf3, arw, sr2, srf, erf | LibRaw | - | yes | Common EXIF tags, GPS tags |
ttf,ttc,cff,woff,fnt,otf | Freetype2 | - | yes, `bmp` | Name & style |
`text/plain` | [libscan](https://github.com/simon987/libscan) | yes | no | - |
html, xml | [libscan](https://github.com/simon987/libscan) | yes | no | - |
tar, zip, rar, 7z, ar ... | Libarchive | yes\* | - | no |
docx, xlsx, pptx | [libscan](https://github.com/simon987/libscan) | yes | if embedded | creator, modified_by, title |
doc (MS Word 97-2003) | antiword | yes | yes | author, title |
mobi, azw, azw3 | libmobi | yes | no | author, title |
wpd (WordPerfect) | libwpd | yes | no | *planned* |
json, jsonl, ndjson | [libscan](https://github.com/simon987/libscan) | yes | - | - |
| File type | Library | Content | Thumbnail | Metadata |
|:---|:---|:---|:---|:---|
| pdf,xps,fb2,epub | MuPDF | text+ocr | yes | author, title |
| cbz,cbr | [libscan](https://github.com/simon987/sist2/tree/master/third-party/libscan) | - | yes | - |
| `audio/*` | ffmpeg | - | yes | ID3 tags |
| `video/*` | ffmpeg | - | yes | title, comment, artist |
| `image/*` | ffmpeg | ocr | yes | [Common EXIF tags](https://github.com/simon987/sist2/blob/efdde2734eca9b14a54f84568863b7ffd59bdba3/src/parsing/media.c#L190), GPS tags |
| raw, rw2, dng, cr2, crw, dcr, k25, kdc, mrw, pef, xf3, arw, sr2, srf, erf | LibRaw | no | yes | Common EXIF tags, GPS tags |
| ttf,ttc,cff,woff,fnt,otf | Freetype2 | - | yes, `bmp` | Name & style |
| `text/plain` | [libscan](https://github.com/simon987/sist2/tree/master/third-party/libscan) | yes | no | - |
| html, xml | [libscan](https://github.com/simon987/sist2/tree/master/third-party/libscan) | yes | no | - |
| tar, zip, rar, 7z, ar ... | Libarchive | yes\* | - | no |
| docx, xlsx, pptx | [libscan](https://github.com/simon987/sist2/tree/master/third-party/libscan) | yes | if embedded | creator, modified_by, title |
| doc (MS Word 97-2003) | antiword | yes | yes | author, title |
| mobi, azw, azw3 | libmobi | yes | no | author, title |
| wpd (WordPerfect) | libwpd | yes | no | *planned* |
| json, jsonl, ndjson | [libscan](https://github.com/simon987/sist2/tree/master/third-party/libscan) | yes | - | - |

\* *See [Archive files](#archive-files)*

@@ -103,18 +103,24 @@ scan is also supported.

### OCR

You can enable OCR support for pdf,xps,fb2,epub file types with the
`--ocr <lang>` option. Download the language data files with your package manager (`apt install tesseract-ocr-eng`) or
You can enable OCR support for ebook (pdf,xps,fb2,epub) or image file types with the
`--ocr-lang <lang>` option in combination with `--ocr-images` and/or `--ocr-ebooks`.
Download the language data files with your package manager (`apt install tesseract-ocr-eng`) or
directly [from Github](https://github.com/tesseract-ocr/tesseract/wiki/Data-Files).

The `simon987/sist2` image comes with common languages
(hin, jpn, eng, fra, rus, spa) pre-installed.

Examples
You can use the `+` separator to specify multiple languages. The language
name must be identical to the `*.traineddata` file installed on your system
(use `chi_sim` rather than `chi-sim`).

Examples:

```bash
sist2 scan --ocr jpn ~/Books/Manga/
sist2 scan --ocr eng ~/Books/Textbooks/
sist2 scan --ocr-ebooks --ocr-lang jpn ~/Books/Manga/
sist2 scan --ocr-images --ocr-lang eng ~/Images/Screenshots/
sist2 scan --ocr-ebooks --ocr-images --ocr-lang eng+chi_sim ~/Chinese-Bilingual/
```

## Build from source

@@ -127,7 +133,7 @@ You can compile **sist2** by yourself if you don't want to use the pre-compiled
git clone --recursive https://github.com/simon987/sist2/
cd sist2
docker build . -f ./Dockerfile -t my-sist2-image
docker run --rm my-sist2-image cat /root/sist2 > sist2-x64-linux
docker run --rm --entrypoint cat my-sist2-image /root/sist2 > sist2-x64-linux
```

### On a linux computer

@@ -144,7 +150,7 @@ docker run --rm my-sist2-image cat /root/sist2 > sist2-x64-linux

```bash
vcpkg install curl[core,openssl]
vcpkg install lmdb cjson glib brotli libarchive[core,bzip2,libxml2,lz4,lzma,lzo] pthread tesseract libxml2 libmupdf gtest mongoose libuuid libmagic libraw jasper lcms gumbo
vcpkg install lmdb cjson glib brotli libarchive[core,bzip2,libxml2,lz4,lzma,lzo] pthread tesseract libxml2 libmupdf gtest mongoose libmagic libraw jasper lcms gumbo
```

1. Build
contrib/systemd/Makefile (new file)

@@ -0,0 +1,7 @@
install:
	install sist2-update-all.sh /usr/bin/sist2-update-all.sh
	install sist2-update-files.sh /usr/bin/sist2-update-files.sh
	install sist2-update-nextcloud.sh /usr/bin/sist2-update-nextcloud.sh
	install sist2-update.service /etc/systemd/system/sist2-update.service
	install sist2-update.timer /etc/systemd/system/sist2-update.timer
	systemctl daemon-reload
contrib/systemd/README.md (new file)

@@ -0,0 +1,31 @@
# Systemd integration example

This example contains my (yatli) personal configuration for sist2 auto-updating.
The following indices are involved in this configuration:

| Index     | Path             | Description                                 |
|-----------|------------------|---------------------------------------------|
| files     | /zpool/files     | Main file repository                        |
| nextcloud | /zpool/nextcloud | Externally synchronized to a cloud account  |

The systemd integration achieves automatic sist2 scanning & indexing everyday at 3:00AM.

### Tailoring the configuration for yourself

`sist2-update-all.sh` calls update scripts for each sist2 index. Add or remove
update scripts accordingly to suit your need. Each update script (e.g.
`sist2-update-files.sh`) has important parameters laid down at the beginning so
make sure to edit them to point to your files and index locations.

### Installation

```bash
# install the services and scripts
sudo make install
# enable & start the timer
sudo systemctl enable sist2-update.timer
sudo systemctl start sist2-update.timer
# verify that the timer has been enabled
systemctl list-timers --all
```
contrib/systemd/sist2-update-all.sh (new executable file)

@@ -0,0 +1,9 @@
#!/bin/bash
set -e
__dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "Update index: Files"
source ${__dir}/sist2-update-files.sh
echo "Update index: Nextcloud"
source ${__dir}/sist2-update-nextcloud.sh
echo "Done. Restarting sist2."
docker restart sist2-sist2-1
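Following the "add or remove update scripts" guidance in the README above, wiring in a third index amounts to copying one of the existing update scripts and sourcing it from `sist2-update-all.sh`. A minimal sketch; the `photos` index, its paths, and the new script name are hypothetical:

```bash
# Hypothetical new index: start from an existing update script
cp sist2-update-files.sh sist2-update-photos.sh
# In sist2-update-photos.sh, change the parameters at the top, e.g.:
#   CONTENT=/zpool/photos
#   ORIG=/mnt/ssd/sist-index/photos.idx
#   NEW=/mnt/ssd/sist-index/photos_$DATE.idx
#   NAME=Photos

# Then source it from sist2-update-all.sh, next to the existing indices:
echo "Update index: Photos"
source ${__dir}/sist2-update-photos.sh
```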
contrib/systemd/sist2-update-files.sh (new executable file)

@@ -0,0 +1,34 @@
#!/bin/bash
set -e
DATE=$(date +%Y_%m_%d)
CONTENT=/zpool/files
ORIG=/mnt/ssd/sist-index/files.idx
NEW=/mnt/ssd/sist-index/files_$DATE.idx
EXCLUDE='ZArchives|TorrentStore|TorrentDownload|624f0c59-1fef-44f6-95e9-7483296f2833|ubuntu-full-2021-12-07'
NAME=Files
#REWRITE_URL="http://localhost:33333/activate?collection=$NAME&path="
REWRITE_URL=""

sist2 scan \
    --threads 14 \
    --mem-throttle 32768 \
    --quality 1.0 \
    --name $NAME \
    --ocr-lang=eng+chi_sim \
    --ocr-ebooks \
    --ocr-images \
    --exclude=$EXCLUDE \
    --rewrite-url=$REWRITE_URL \
    --incremental=$ORIG \
    --output=$NEW \
    $CONTENT
echo ">>> Scan complete"
rm -rf $ORIG
mv $NEW $ORIG

unset http_proxy
unset https_proxy
unset HTTP_PROXY
unset HTTPS_PROXY
sist2 index $ORIG --incremental-index
echo ">>> Index complete"
contrib/systemd/sist2-update-nextcloud.sh (new executable file)

@@ -0,0 +1,33 @@
#!/bin/bash
set -e
DATE=$(date +%Y_%m_%d)
CONTENT=/zpool/nextcloud/v-yadli
ORIG=/mnt/ssd/sist-index/nextcloud.idx
NEW=/mnt/ssd/sist-index/nextcloud_$DATE.idx
EXCLUDE='Yatao|.*263418493\\/Image\\/.*'
NAME=NextCloud
# REWRITE_URL="http://localhost:33333/activate?collection=$NAME&path="
REWRITE_URL=""

sist2 scan \
    --threads 14 \
    --mem-throttle 32768 \
    --quality 1.0 \
    --name $NAME \
    --ocr-lang=eng+chi_sim \
    --ocr-ebooks \
    --ocr-images \
    --exclude=$EXCLUDE \
    --rewrite-url=$REWRITE_URL \
    --incremental=$ORIG \
    --output=$NEW \
    $CONTENT
echo ">>> Scan complete"
rm -rf $ORIG
mv $NEW $ORIG

unset http_proxy
unset https_proxy
unset HTTP_PROXY
unset HTTPS_PROXY
sist2 index $ORIG --incremental-index
contrib/systemd/sist2-update.service (new file)

@@ -0,0 +1,6 @@
[Unit]
Description=sist2-update

[Service]
User=yatli
ExecStart=/bin/bash /usr/bin/sist2-update-all.sh
contrib/systemd/sist2-update.timer (new file)

@@ -0,0 +1,10 @@
[Unit]
Description=sist2-update

[Timer]
OnCalendar=*-*-* 3:00:00
Persistent=true
Unit=sist2-update.service

[Install]
WantedBy=timers.target
docs/USAGE.md

@@ -13,7 +13,7 @@
* [options](#web-options)
* [examples](#web-examples)
* [rewrite_url](#rewrite_url)
* [link to specific indices](#link-to-specific-indices)
* [elasticsearch](#elasticsearch)
* [exec-script](#exec-script)
* [tagging](#tagging)
* [sidecar files](#sidecar-files)
@@ -32,9 +32,11 @@ Lightning-fast file system indexer and search tool.

Scan options
-t, --threads=<int> Number of threads. DEFAULT=1
-q, --quality=<flt> Thumbnail quality, on a scale of 1.0 to 31.0, 1.0 being the best. DEFAULT=3
--size=<int> Thumbnail size, in pixels. Use negative value to disable. DEFAULT=500
--content-size=<int> Number of bytes to be extracted from text documents. Use negative value to disable. DEFAULT=32768
--mem-throttle=<int> Total memory threshold in MiB for scan throttling. DEFAULT=0
-q, --thumbnail-quality=<flt> Thumbnail quality, on a scale of 1.0 to 31.0, 1.0 being the best. DEFAULT=1
--thumbnail-size=<int> Thumbnail size, in pixels. DEFAULT=500
--thumbnail-count=<int> Number of thumbnails to generate. Set a value > 1 to create video previews, set to 0 to disable thumbnails. DEFAULT=1
--content-size=<int> Number of bytes to be extracted from text documents. Set to 0 to disable. DEFAULT=32768
--incremental=<str> Reuse an existing index and only scan modified files.
-o, --output=<str> Output directory. DEFAULT=index.sist2/
--rewrite-url=<str> Serve files from this url instead of from disk.
@@ -42,20 +44,24 @@ Scan options
--depth=<int> Scan up to DEPTH subdirectories deep. Use 0 to only scan files in PATH. DEFAULT: -1
--archive=<str> Archive file mode (skip|list|shallow|recurse). skip: Don't parse, list: only get file names as text, shallow: Don't parse archives inside archives. DEFAULT: recurse
--archive-passphrase=<str> Passphrase for encrypted archive files
--ocr=<str> Tesseract language (use tesseract --list-langs to see which are installed on your machine)
--ocr-lang=<str> Tesseract language (use 'tesseract --list-langs' to see which are installed on your machine)
--ocr-images Enable OCR'ing of image files.
--ocr-ebooks Enable OCR'ing of ebook files.
-e, --exclude=<str> Files that match this regex will not be scanned
--fast Only index file names & mime type
--treemap-threshold=<str> Relative size threshold for treemap (see USAGE.md). DEFAULT: 0.0005
--mem-buffer=<int> Maximum memory buffer size per thread in MB for files inside archives (see USAGE.md). DEFAULT: 2000
--mem-buffer=<int> Maximum memory buffer size per thread in MiB for files inside archives (see USAGE.md). DEFAULT: 2000
--read-subtitles Read subtitles from media files.
--fast-epub Faster but less accurate EPUB parsing (no thumbnails, metadata)
--checksums Calculate file checksums when scanning.
--list-file=<str> Specify a list of newline-delimited paths to be scanned instead of normal directory traversal. Use '-' to read from stdin.

Index options
-t, --threads=<int> Number of threads. DEFAULT=1
--es-url=<str> Elasticsearch url with port. DEFAULT=http://localhost:9200
--es-index=<str> Elasticsearch index name. DEFAULT=sist2
-p, --print Just print JSON documents to stdout.
--incremental-index Conduct incremental indexing, assumes that the old index is already digested by Elasticsearch.
--script-file=<str> Path to user script.
--mappings-file=<str> Path to Elasticsearch mappings.
--settings-file=<str> Path to Elasticsearch settings.
@@ -71,12 +77,14 @@ Web options
--tag-auth=<str> Basic auth in user:password format for tagging
--tagline=<str> Tagline in navbar
--dev Serve html & js files from disk (for development)
--lang=<str> Default UI language. Can be changed by the user

Exec-script options
--es-url=<str> Elasticsearch url. DEFAULT=http://localhost:9200
--es-index=<str> Elasticsearch index name. DEFAULT=sist2
--script-file=<str> Path to user script.
--async-script Execute user script asynchronously.
Made by simon987 <me@simon987.net>. Released under GPL-3.0
```

## Scan

@@ -84,14 +92,22 @@ Exec-script options

### Scan options

* `-t, --threads`
Number of threads for file parsing. **Do not set a number higher than `$(nproc)` or `$(Get-WmiObject Win32_ComputerSystem).NumberOfLogicalProcessors` in Windows!**
* `-q, --quality`
Number of threads for file parsing. **Do not set a number higher than `$(nproc)` or `$(Get-CimInstance Win32_ComputerSystem).NumberOfLogicalProcessors` in Windows!**
* `--mem-throttle`
Total memory threshold in MiB for scan throttling. Worker threads will not start a new parse job
until the total memory usage of sist2 is below this threshold. Set to 0 to disable. DEFAULT=0
* `-q, --thumbnail-quality`
Thumbnail quality, on a scale of 1.0 to 31.0, 1.0 being the best.
* `--size`
* `--thumbnail-size`
Thumbnail size in pixels.
* `--thumbnail-count`
Maximum number of thumbnails to generate. When set to a value >= 2, thumbnails for video previews
will be generated. The actual number of thumbnails generated depends on the length of the video (maximum 1 image
every ~5s). Set to 0 to completely disable thumbnails.
* `--content-size`
Number of bytes of text to be extracted from the content of files (plain text and PDFs).
Number of bytes of text to be extracted from the content of files (plain text, PDFs etc.).
Repeated whitespace and special characters do not count toward this limit.
Set to 0 to completely disable content parsing.
* `--incremental`
Specify an existing index. Information about files in this index that were not modified (based on *mtime* attribute)
will be copied to the new index and will not be parsed again.
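A short example combining the scan options described above (`--thumbnail-count`, `--content-size`, `--incremental`); the directory and index paths are hypothetical:

```bash
# Initial scan: up to 10 thumbnails per video (for previews) and at most
# 16384 bytes of extracted text per document (paths are hypothetical)
sist2 scan --threads=4 --thumbnail-count=10 --content-size=16384 \
    --output=./docs.idx ~/Documents

# Later rescan: files whose mtime has not changed are copied from the old
# index instead of being parsed again
sist2 scan --threads=4 --incremental=./docs.idx --output=./docs_new.idx ~/Documents
```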
@@ -104,7 +120,7 @@ Exec-script options
* list: Only get file names as text
* shallow: Don't parse archives inside archives.
* recurse: Scan archives recursively (default)
* `--ocr` See [OCR](../README.md#OCR)
* `--ocr-lang`, `--ocr-ebooks`, `--ocr-images` See [OCR](../README.md#OCR)
* `-e, --exclude` Regex pattern to exclude files. A file is excluded if the pattern matches any
part of the full absolute path.

@@ -124,13 +140,13 @@ Exec-script options
In effect, smaller `treemap-threshold` values will yield a more detailed
(but also a more cluttered and harder to read) visualization.

* `--mem-buffer` Maximum memory buffer size in MB (per thread) for files inside archives. Media files
* `--mem-buffer` Maximum memory buffer size in MiB (per thread) for files inside archives. Media files
larger than this number will be read sequentially and no *seek* operations will be supported.

To check if a media file can be parsed without *seek*, execute `cat file.mp4 | ffprobe -`
* `--read-subtitles` When enabled, will attempt to read the subtitles stream from media files.
* `--fast-epub` Much faster but less accurate EPUB parsing. When enabled, sist2 will use a simple HTML parser to read epub files instead of the MuPDF library. No thumbnails are generated and author/title metadata are not parsed.
* `--checksums` Calculate file checksums (sha1) when scanning files. This option does not cause any additional read
* `--checksums` Calculate file checksums (SHA1) when scanning files. This option does not cause any additional read
operations. Checksums are not calculated for all file types, unless the file is inside an archive. When enabled, duplicate
files are hidden in the web UI (this behaviour can be toggled in the Configuration page).

@@ -200,6 +216,9 @@ and values are raw image bytes.
Elasticsearch index name. DEFAULT=sist2
* `-p, --print`
Print index in JSON format to stdout.
* `--incremental-index`
Conduct incremental indexing. Assumes that the old index is already ingested in Elasticsearch.
Only the new changes since the last scan will be sent.
* `--script-file`
Path to user script. See [Scripting](scripting.md).
* `--mappings-file`
@@ -247,6 +266,8 @@ sist2 index --print ./my_index/ | jq | less
`--auth` argument, but authentication is only applied the `/tag/` endpoint.
* `--tagline=<str>` When specified, will replace the default tagline in the navbar.
* `--dev` Serve html & js files from disk (for development, used to modify frontend files without having to recompile)
* `--lang=<str>` Set the default web UI language (See #180 for a list of supported languages, default
is `en`). The user can change the language in the configuration page

### Web examples

@@ -266,9 +287,20 @@ sist2 web index1 index2 index3 index4
When the `rewrite_url` field is not empty, the web module ignores the `root`
field and will return a HTTP redirect to `<rewrite_url><path>/<name><extension>`
instead of serving the file from disk.
Both the `root` and `rewrite_url` fields are safe to manually modify from the
Both the `root` and `rewrite_url` fields are safe to manually modify from the
`descriptor.json` file.
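A minimal sketch of setting `rewrite_url` after the fact. It assumes the descriptor is the `descriptor.json` at the root of the index directory and that `jq` is available; the URL and index path are hypothetical:

```bash
# Redirect file downloads to a static file server instead of serving from disk.
# The web module will then redirect to <rewrite_url><path>/<name><extension>.
jq '.rewrite_url = "http://files.example.com/"' ./docs.idx/descriptor.json > /tmp/descriptor.json \
    && mv /tmp/descriptor.json ./docs.idx/descriptor.json
```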
# Elasticsearch

Elasticsearch versions >=6.8.0, <8.0.0 are supported by sist2.

Using a version >=7.14.0 is recommended to enable the following features:

- Bug fix for large documents (See #198)

When using a legacy version of ES, a notice will be displayed next to the sist2 version in the web UI.
If you don't care about the features above, you can ignore it or disable it in the configuration page.
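To verify which Elasticsearch version sist2 will be talking to, the cluster's root endpoint is enough (the URL assumes the default local instance):

```bash
# version.number in the response should be >=6.8.0 and <8.0.0, ideally >=7.14.0
curl -s http://localhost:9200
```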
## exec-script

The `exec-script` command is used to execute a user script for an index that has already been imported to Elasticsearch with the `index` command. Note that the documents will not be reset to their default state before each execution as the `index` command does: if you make undesired changes to the documents by accident, you will need to run `index` again to revert to the original state.
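A hedged sketch of the command line this describes, using only the exec-script options listed in the usage text above; the script file name is hypothetical, and passing the index directory as a positional argument (as `index` does) is an assumption:

```bash
# Re-apply a user script to documents already ingested in Elasticsearch.
# If it makes unwanted changes, re-running `sist2 index` restores the defaults.
sist2 exec-script --es-url=http://localhost:9200 --es-index=sist2 \
    --script-file=./my_script.painless ./docs.idx
```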
@@ -303,7 +335,7 @@ See [scripting](scripting.md) documentation.
# Sidecar files

When scanning, sist2 will read metadata from `.s2meta` JSON files and overwrite the
original document's metadata. Sidecar metadata files will also work inside archives.
original document's indexed metadata (does not modify the actual file). Sidecar metadata files will also work inside archives.
Sidecar files themselves are not saved in the index.

This feature is useful to leverage third-party applications such as speech-to-text or
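A sketch of what a sidecar could look like next to `/zpool/files/interviews/ep01.mp3`; the `<filename>.s2meta` naming and the JSON field names shown here are illustrative assumptions, not taken from this page:

```bash
# Hypothetical sidecar written by an external speech-to-text tool; sist2 merges
# these values into the document's indexed metadata during the next scan.
cat > /zpool/files/interviews/ep01.mp3.s2meta << 'EOF'
{
  "content": "Transcript produced by an external speech-to-text tool ...",
  "author": "speech-to-text pipeline"
}
EOF
```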
Binary file changed (not shown): Before 3.9 KiB, After 35 KiB
docs/sist2.png, binary file changed (not shown): Before 889 KiB, After 1011 KiB
@@ -39,7 +39,7 @@
|
||||
"index": false
|
||||
},
|
||||
"thumbnail": {
|
||||
"type": "keyword",
|
||||
"type": "integer",
|
||||
"index": false
|
||||
},
|
||||
"videoc": {
|
||||
@@ -78,6 +78,7 @@
|
||||
"name": {
|
||||
"analyzer": "content_analyzer",
|
||||
"type": "text",
|
||||
"fielddata": true,
|
||||
"fields": {
|
||||
"nGram": {
|
||||
"type": "text",
|
||||
|
||||
58
schema/settings_legacy.json
Normal file
58
schema/settings_legacy.json
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"index": {
|
||||
"refresh_interval": "30s",
|
||||
"codec": "best_compression",
|
||||
"number_of_replicas": 0
|
||||
},
|
||||
"analysis": {
|
||||
"tokenizer": {
|
||||
"path_tokenizer": {
|
||||
"type": "path_hierarchy",
|
||||
"delimiter": "/"
|
||||
},
|
||||
"tag_tokenizer": {
|
||||
"type": "path_hierarchy",
|
||||
"delimiter": "."
|
||||
},
|
||||
"my_nGram_tokenizer": {
|
||||
"type": "nGram",
|
||||
"min_gram": 3,
|
||||
"max_gram": 3
|
||||
}
|
||||
},
|
||||
"analyzer": {
|
||||
"path_analyzer": {
|
||||
"tokenizer": "path_tokenizer",
|
||||
"filter": [
|
||||
"lowercase"
|
||||
]
|
||||
},
|
||||
"tag_analyzer": {
|
||||
"tokenizer": "tag_tokenizer",
|
||||
"filter": [
|
||||
"lowercase"
|
||||
]
|
||||
},
|
||||
"case_insensitive_kw_analyzer": {
|
||||
"tokenizer": "keyword",
|
||||
"filter": [
|
||||
"lowercase"
|
||||
]
|
||||
},
|
||||
"my_nGram": {
|
||||
"tokenizer": "my_nGram_tokenizer",
|
||||
"filter": [
|
||||
"lowercase",
|
||||
"asciifolding"
|
||||
]
|
||||
},
|
||||
"content_analyzer": {
|
||||
"tokenizer": "standard",
|
||||
"filter": [
|
||||
"lowercase",
|
||||
"asciifolding"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,4 +7,3 @@ python3 scripts/serve_static.py > src/web/static_generated.c
|
||||
python3 scripts/index_static.py > src/index/static_generated.c
|
||||
|
||||
printf "static const char *const Sist2CommitHash = \"%s\";\n" $(git rev-parse HEAD) > src/git_hash.h
|
||||
printf "static const char *const LibScanCommitHash = \"%s\";\n" $(cd third-party/libscan/ && git rev-parse HEAD) >> src/git_hash.h
|
||||
|
||||
@@ -3,6 +3,7 @@ import json
|
||||
files = [
|
||||
"schema/mappings.json",
|
||||
"schema/settings.json",
|
||||
"schema/settings_legacy.json",
|
||||
"schema/pipeline.json",
|
||||
]
|
||||
|
||||
|
||||
2
scripts/start_dev_es.sh
Executable file
2
scripts/start_dev_es.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
docker run --rm -it -p 9200:9200 -e "discovery.type=single-node" \
|
||||
-e "ES_JAVA_OPTS=-Xms8g -Xmx8g" elasticsearch:7.14.0
|
||||
2
sist2-vue/dist/css/index.css
vendored
2
sist2-vue/dist/css/index.css
vendored
File diff suppressed because one or more lines are too long
16
sist2-vue/dist/js/chunk-vendors.js
vendored
16
sist2-vue/dist/js/chunk-vendors.js
vendored
File diff suppressed because one or more lines are too long
2
sist2-vue/dist/js/index.js
vendored
2
sist2-vue/dist/js/index.js
vendored
File diff suppressed because one or more lines are too long
4204
sist2-vue/package-lock.json
generated
4204
sist2-vue/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -8,7 +8,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@egjs/vue-infinitegrid": "3.3.0",
|
||||
"axios": "^0.21.1",
|
||||
"axios": "^0.25.0",
|
||||
"bootstrap-vue": "^2.21.2",
|
||||
"core-js": "^3.6.5",
|
||||
"crypto-es": "^1.2.7",
|
||||
|
||||
@@ -146,6 +146,7 @@ html, body {
|
||||
|
||||
.theme-black .nav-tabs .nav-link {
|
||||
color: #e0e0e0;
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.theme-black .nav-tabs .nav-item.show .nav-link, .theme-black .nav-tabs .nav-link.active {
|
||||
@@ -309,4 +310,8 @@ mark {
|
||||
display: inline-block;
|
||||
width: 40%;
|
||||
}
|
||||
|
||||
.pointer {
|
||||
cursor: pointer;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -51,6 +51,7 @@ export interface EsHit {
|
||||
duration: number
|
||||
tag: string[]
|
||||
checksum: string
|
||||
thumbnail: string
|
||||
}
|
||||
_props: {
|
||||
isSubDocument: boolean
|
||||
@@ -61,6 +62,9 @@ export interface EsHit {
|
||||
isPlayableImage: boolean
|
||||
isAudio: boolean
|
||||
hasThumbnail: boolean
|
||||
hasVidPreview: boolean
|
||||
/** Number of thumbnails available */
|
||||
tnNum: number
|
||||
}
|
||||
highlight: {
|
||||
name: string[] | undefined,
|
||||
@@ -131,6 +135,15 @@ class Sist2Api {
|
||||
|
||||
if ("thumbnail" in hit._source) {
|
||||
hit._props.hasThumbnail = true;
|
||||
|
||||
if (Number.isNaN(Number(hit._source.thumbnail))) {
|
||||
// Backwards compatibility
|
||||
hit._props.tnNum = 1;
|
||||
hit._props.hasVidPreview = false;
|
||||
} else {
|
||||
hit._props.tnNum = Number(hit._source.thumbnail);
|
||||
hit._props.hasVidPreview = hit._props.tnNum > 1;
|
||||
}
|
||||
}
|
||||
|
||||
switch (mimeCategory) {
|
||||
@@ -251,20 +264,31 @@ class Sist2Api {
|
||||
});
|
||||
}
|
||||
|
||||
getMimeTypes() {
|
||||
return this.esQuery({
|
||||
aggs: {
|
||||
getMimeTypes(query = undefined) {
|
||||
const AGGS = {
|
||||
mimeTypes: {
|
||||
terms: {
|
||||
field: "mime",
|
||||
size: 10000
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
if (!query) {
|
||||
query = {
|
||||
aggs: AGGS,
|
||||
size: 0,
|
||||
}).then(resp => {
|
||||
};
|
||||
} else {
|
||||
query.size = 0;
|
||||
query.aggs = AGGS;
|
||||
}
|
||||
|
||||
return this.esQuery(query).then(resp => {
|
||||
const mimeMap: any[] = [];
|
||||
resp["aggregations"]["mimeTypes"]["buckets"].sort((a: any, b: any) => a.key > b.key).forEach((bucket: any) => {
|
||||
const buckets = resp["aggregations"]["mimeTypes"]["buckets"];
|
||||
|
||||
buckets.sort((a: any, b: any) => a.key > b.key).forEach((bucket: any) => {
|
||||
const tmp = bucket["key"].split("/");
|
||||
const category = tmp[0];
|
||||
const mime = tmp[1];
|
||||
@@ -284,11 +308,18 @@ class Sist2Api {
|
||||
});
|
||||
|
||||
if (!category_exists) {
|
||||
mimeMap.push({"text": category, children: [child]});
|
||||
mimeMap.push({text: category, children: [child], id: category});
|
||||
}
|
||||
})
|
||||
|
||||
return mimeMap;
|
||||
mimeMap.forEach(node => {
|
||||
if (node.children) {
|
||||
node.children.sort((a, b) => a.id.localeCompare(b.id));
|
||||
}
|
||||
})
|
||||
mimeMap.sort((a, b) => a.id.localeCompare(b.id))
|
||||
|
||||
return {buckets, mimeMap};
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -43,6 +43,20 @@ const SORT_MODES = {
|
||||
{_tie: {order: "asc"}}
|
||||
],
|
||||
key: (hit: EsHit) => hit._source.size
|
||||
},
|
||||
nameAsc: {
|
||||
mode: [
|
||||
{name: {order: "asc"}},
|
||||
{_tie: {order: "asc"}}
|
||||
],
|
||||
key: (hit: EsHit) => hit._source.name
|
||||
},
|
||||
nameDesc: {
|
||||
mode: [
|
||||
{name: {order: "desc"}},
|
||||
{_tie: {order: "asc"}}
|
||||
],
|
||||
key: (hit: EsHit) => hit._source.name
|
||||
}
|
||||
} as any;
|
||||
|
||||
@@ -73,6 +87,8 @@ class Sist2Query {
|
||||
const selectedMimeTypes = getters.selectedMimeTypes;
|
||||
const selectedTags = getters.selectedTags;
|
||||
|
||||
const legacyES = store.state.sist2Info.esVersionLegacy;
|
||||
|
||||
const filters = [
|
||||
{terms: {index: selectedIndexIds}}
|
||||
] as any[];
|
||||
@@ -187,9 +203,13 @@ class Sist2Query {
|
||||
"name.nGram": {},
|
||||
"content.nGram": {},
|
||||
font_name: {},
|
||||
},
|
||||
max_analyzed_offset: 9_999_999
|
||||
}
|
||||
};
|
||||
|
||||
if (!legacyES) {
|
||||
q.highlight.max_analyzed_offset = 9_999_999;
|
||||
}
|
||||
|
||||
if (getters.optSearchInPath) {
|
||||
q.highlight.fields["path.text"] = {};
|
||||
q.highlight.fields["path.nGram"] = {};
|
||||
|
||||
@@ -1,5 +1,31 @@
|
||||
<template>
|
||||
<div v-if="$store.state.optUseDatePicker">
|
||||
<b-row>
|
||||
<b-col sm="6">
|
||||
<b-form-datepicker
|
||||
value-as-date
|
||||
:date-format-options="{ year: 'numeric', month: '2-digit', day: '2-digit' }"
|
||||
:locale="$store.state.optLang"
|
||||
class="mb-2"
|
||||
:value="dateMin" @input="setDateMin"></b-form-datepicker>
|
||||
</b-col>
|
||||
<b-col sm="6">
|
||||
<b-form-datepicker
|
||||
value-as-date
|
||||
:date-format-options="{ year: 'numeric', month: '2-digit', day: '2-digit' }"
|
||||
:locale="$store.state.optLang"
|
||||
class="mb-2"
|
||||
:value="dateMax" @input="setDateMax"></b-form-datepicker>
|
||||
</b-col>
|
||||
</b-row>
|
||||
</div>
|
||||
<div v-else>
|
||||
<b-row>
|
||||
<b-col style="height: 70px;">
|
||||
<div id="dateSlider"></div>
|
||||
</b-col>
|
||||
</b-row>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
@@ -10,11 +36,36 @@ import {mergeTooltips} from "@/util-js";
|
||||
|
||||
export default {
|
||||
name: "DateSlider",
|
||||
methods: {
|
||||
setDateMin(val) {
|
||||
const epochDate = Math.ceil(+val / 1000);
|
||||
this.$store.commit("setDateMin", epochDate);
|
||||
},
|
||||
setDateMax(val) {
|
||||
const epochDate = Math.ceil(+val / 1000);
|
||||
this.$store.commit("setDateMax", epochDate);
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
dateMin() {
|
||||
const dateMin = this.$store.state.dateMin ? this.$store.state.dateMin : this.$store.state.dateBoundsMin;
|
||||
return new Date(dateMin * 1000)
|
||||
},
|
||||
dateMax() {
|
||||
const dateMax = this.$store.state.dateMax ? this.$store.state.dateMax : this.$store.state.dateBoundsMax;
|
||||
return new Date(dateMax * 1000)
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.$store.subscribe((mutation) => {
|
||||
if (mutation.type === "setDateBoundsMax") {
|
||||
const elem = document.getElementById("dateSlider");
|
||||
|
||||
if (elem === null) {
|
||||
// Using b-form-datepicker, skip initialisation of slider
|
||||
return
|
||||
}
|
||||
|
||||
if (elem.children.length > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
|
||||
<b-card-body>
|
||||
|
||||
<!-- TODO: ES connectivity, Link to GH page -->
|
||||
<b-table :items="tableItems" small borderless responsive="md" thead-class="hidden" class="mb-0"></b-table>
|
||||
|
||||
<hr />
|
||||
@@ -16,7 +15,7 @@
|
||||
|
||||
<script>
|
||||
import IndexDebugInfo from "@/components/IndexDebugInfo";
|
||||
import DebugIcon from "@/components/DebugIcon";
|
||||
import DebugIcon from "@/components/icons/DebugIcon";
|
||||
|
||||
export default {
|
||||
name: "DebugInfo.vue",
|
||||
@@ -28,10 +27,13 @@ export default {
|
||||
{key: "platform", value: this.$store.state.sist2Info.platform},
|
||||
{key: "debugBinary", value: this.$store.state.sist2Info.debug},
|
||||
{key: "sist2CommitHash", value: this.$store.state.sist2Info.sist2Hash},
|
||||
{key: "libscanCommitHash", value: this.$store.state.sist2Info.libscanHash},
|
||||
{key: "esIndex", value: this.$store.state.sist2Info.esIndex},
|
||||
{key: "tagline", value: this.$store.state.sist2Info.tagline},
|
||||
{key: "dev", value: this.$store.state.sist2Info.dev},
|
||||
{key: "mongooseVersion", value: this.$store.state.sist2Info.mongooseVersion},
|
||||
{key: "esVersion", value: this.$store.state.sist2Info.esVersion},
|
||||
{key: "esVersionSupported", value: this.$store.state.sist2Info.esVersionSupported},
|
||||
{key: "esVersionLegacy", value: this.$store.state.sist2Info.esVersionLegacy},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<template>
|
||||
<div class="doc-card" :class="{'sub-document': doc._props.isSubDocument}" :style="`width: ${width}px`">
|
||||
<div class="doc-card" :class="{'sub-document': doc._props.isSubDocument}" :style="`width: ${width}px`"
|
||||
@click="$store.commit('busTnTouchStart', null)">
|
||||
<b-card
|
||||
no-body
|
||||
img-top
|
||||
@@ -10,36 +11,11 @@
|
||||
<ContentDiv :doc="doc"></ContentDiv>
|
||||
|
||||
<!-- Thumbnail-->
|
||||
<div v-if="doc._props.hasThumbnail" class="img-wrapper" @mouseenter="onTnEnter()" @mouseleave="onTnLeave()">
|
||||
<div v-if="doc._props.isAudio" class="card-img-overlay" :class="{'small-badge': smallBadge}">
|
||||
<span class="badge badge-resolution">{{ humanTime(doc._source.duration) }}</span>
|
||||
</div>
|
||||
|
||||
<div v-if="doc._props.isImage && !hover" class="card-img-overlay" :class="{'small-badge': smallBadge}">
|
||||
<span class="badge badge-resolution">{{ `${doc._source.width}x${doc._source.height}` }}</span>
|
||||
</div>
|
||||
|
||||
<div v-if="(doc._props.isVideo || doc._props.isGif) && doc._source.duration > 0 && !hover" class="card-img-overlay"
|
||||
:class="{'small-badge': smallBadge}">
|
||||
<span class="badge badge-resolution">{{ humanTime(doc._source.duration) }}</span>
|
||||
</div>
|
||||
|
||||
<div v-if="doc._props.isPlayableVideo" class="play">
|
||||
<svg viewBox="0 0 494.942 494.942" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="m35.353 0 424.236 247.471-424.236 247.471z"/>
|
||||
</svg>
|
||||
</div>
|
||||
|
||||
<img v-if="doc._props.isPlayableImage || doc._props.isPlayableVideo"
|
||||
:src="(doc._props.isGif && hover) ? `f/${doc._id}` : `t/${doc._source.index}/${doc._id}`"
|
||||
alt=""
|
||||
class="pointer fit card-img-top" @click="onThumbnailClick()">
|
||||
<img v-else :src="`t/${doc._source.index}/${doc._id}`" alt=""
|
||||
class="fit card-img-top">
|
||||
</div>
|
||||
<FullThumbnail :doc="doc" :small-badge="smallBadge" @onThumbnailClick="onThumbnailClick()"></FullThumbnail>
|
||||
|
||||
<!-- Audio player-->
|
||||
<audio v-if="doc._props.isAudio" ref="audio" preload="none" class="audio-fit fit" controls :type="doc._source.mime"
|
||||
<audio v-if="doc._props.isAudio" ref="audio" preload="none" class="audio-fit fit" controls
|
||||
:type="doc._source.mime"
|
||||
:src="`f/${doc._id}`"
|
||||
@play="onAudioPlay()"></audio>
|
||||
|
||||
@@ -66,31 +42,19 @@ import TagContainer from "@/components/TagContainer.vue";
|
||||
import DocFileTitle from "@/components/DocFileTitle.vue";
|
||||
import DocInfoModal from "@/components/DocInfoModal.vue";
|
||||
import ContentDiv from "@/components/ContentDiv.vue";
|
||||
import FullThumbnail from "@/components/FullThumbnail";
|
||||
|
||||
|
||||
export default {
|
||||
components: {ContentDiv, DocInfoModal, DocFileTitle, TagContainer},
|
||||
components: {FullThumbnail, ContentDiv, DocInfoModal, DocFileTitle, TagContainer},
|
||||
props: ["doc", "width"],
|
||||
data() {
|
||||
return {
|
||||
ext: ext,
|
||||
showInfo: false,
|
||||
hover: false
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
placeHolderStyle() {
|
||||
|
||||
const tokens = this.doc._source.thumbnail.split(",");
|
||||
const w = Number(tokens[0]);
|
||||
const h = Number(tokens[1]);
|
||||
|
||||
const MAX_HEIGHT = 400;
|
||||
|
||||
return {
|
||||
height: `${Math.min((h / w) * this.width, MAX_HEIGHT)}px`,
|
||||
}
|
||||
},
|
||||
smallBadge() {
|
||||
return this.width < 150;
|
||||
}
|
||||
@@ -112,28 +76,10 @@ export default {
|
||||
}
|
||||
});
|
||||
},
|
||||
onTnEnter() {
|
||||
this.hover = true;
|
||||
},
|
||||
onTnLeave() {
|
||||
this.hover = false;
|
||||
}
|
||||
},
|
||||
}
|
||||
</script>
|
||||
<style>
|
||||
.img-wrapper {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.img-wrapper:hover svg {
|
||||
fill: rgba(0, 0, 0, 1);
|
||||
}
|
||||
|
||||
.pointer {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.fit {
|
||||
display: block;
|
||||
min-width: 64px;
|
||||
@@ -143,15 +89,17 @@ export default {
|
||||
width: auto;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.audio-fit {
|
||||
height: 39px;
|
||||
vertical-align: bottom;
|
||||
display: inline;
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
<style scoped>
|
||||
|
||||
.card-img-top {
|
||||
border-top-left-radius: 0;
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
|
||||
.padding-03 {
|
||||
padding: 0.3rem;
|
||||
}
|
||||
@@ -169,55 +117,11 @@ export default {
|
||||
padding: 0.3rem;
|
||||
}
|
||||
|
||||
.thumbnail-placeholder {
|
||||
|
||||
}
|
||||
|
||||
.card-img-overlay {
|
||||
pointer-events: none;
|
||||
padding: 0.75rem;
|
||||
bottom: unset;
|
||||
top: 0;
|
||||
left: unset;
|
||||
right: unset;
|
||||
}
|
||||
|
||||
.badge-resolution {
|
||||
color: #212529;
|
||||
background-color: #FFC107;
|
||||
}
|
||||
|
||||
.play {
|
||||
position: absolute;
|
||||
width: 25px;
|
||||
height: 25px;
|
||||
left: 50%;
|
||||
top: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.play svg {
|
||||
fill: rgba(0, 0, 0, 0.7);
|
||||
}
|
||||
|
||||
.doc-card {
|
||||
padding-left: 3px;
|
||||
padding-right: 3px;
|
||||
}
|
||||
|
||||
.small-badge {
|
||||
padding: 1px 3px;
|
||||
font-size: 70%;
|
||||
}
|
||||
|
||||
.audio-fit {
|
||||
height: 39px;
|
||||
vertical-align: bottom;
|
||||
display: inline;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.sub-document .card {
|
||||
background: #AB47BC1F !important;
|
||||
}
|
||||
|
||||
@@ -2,7 +2,10 @@
|
||||
<b-modal :visible="show" size="lg" :hide-footer="true" static lazy @close="$emit('close')" @hide="$emit('close')"
|
||||
>
|
||||
<template #modal-title>
|
||||
<h5 class="modal-title" :title="doc._source.name + ext(doc)">{{ doc._source.name + ext(doc) }}</h5>
|
||||
<h5 class="modal-title" :title="doc._source.name + ext(doc)">
|
||||
{{ doc._source.name + ext(doc) }}
|
||||
<router-link :to="`/file?byId=${doc._id}`">#</router-link>
|
||||
</h5>
|
||||
</template>
|
||||
|
||||
<img v-if="doc._props.hasThumbnail" :src="`t/${doc._source.index}/${doc._id}`" alt="" class="fit card-img-top">
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
<template>
|
||||
<b-list-group-item class="flex-column align-items-start mb-2" :class="{'sub-document': doc._props.isSubDocument}">
|
||||
<b-list-group-item class="flex-column align-items-start mb-2" :class="{'sub-document': doc._props.isSubDocument}"
|
||||
@mouseenter="onTnEnter()" @mouseleave="onTnLeave()">
|
||||
|
||||
<!-- Info modal-->
|
||||
<DocInfoModal :show="showInfo" :doc="doc" @close="showInfo = false"></DocInfoModal>
|
||||
|
||||
<div class="media ml-2">
|
||||
|
||||
<!-- Thumbnail-->
|
||||
<div v-if="doc._props.hasThumbnail" class="align-self-start mr-2 wrapper-sm">
|
||||
<div class="img-wrapper">
|
||||
<div v-if="doc._props.isPlayableVideo" class="play">
|
||||
@@ -25,6 +28,7 @@
|
||||
<FileIcon></FileIcon>
|
||||
</div>
|
||||
|
||||
<!-- Doc line-->
|
||||
<div class="doc-line ml-3">
|
||||
<div style="display: flex">
|
||||
<span class="info-icon" @click="showInfo = true"></span>
|
||||
@@ -56,7 +60,7 @@ import TagContainer from "@/components/TagContainer";
|
||||
import DocFileTitle from "@/components/DocFileTitle";
|
||||
import DocInfoModal from "@/components/DocInfoModal";
|
||||
import ContentDiv from "@/components/ContentDiv";
|
||||
import FileIcon from "@/components/FileIcon";
|
||||
import FileIcon from "@/components/icons/FileIcon";
|
||||
|
||||
export default {
|
||||
name: "DocListItem",
|
||||
@@ -85,7 +89,13 @@ export default {
|
||||
return this.doc.highlight["path.nGram"] + "/"
|
||||
}
|
||||
return this.doc._source.path + "/"
|
||||
}
|
||||
},
|
||||
onTnEnter() {
|
||||
this.hover = true;
|
||||
},
|
||||
onTnLeave() {
|
||||
this.hover = false;
|
||||
},
|
||||
}
|
||||
}
|
||||
</script>
|
||||
@@ -147,6 +157,7 @@ export default {
|
||||
.list-group-item .img-wrapper {
|
||||
width: 88px;
|
||||
height: 88px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.fit-sm {
|
||||
|
||||
173
sist2-vue/src/components/FullThumbnail.vue
Normal file
173
sist2-vue/src/components/FullThumbnail.vue
Normal file
@@ -0,0 +1,173 @@
|
||||
<template>
|
||||
<div v-if="doc._props.hasThumbnail" class="img-wrapper" @mouseenter="onTnEnter()" @mouseleave="onTnLeave()"
|
||||
@touchstart="onTouchStart()">
|
||||
<div v-if="doc._props.isAudio" class="card-img-overlay" :class="{'small-badge': smallBadge}">
|
||||
<span class="badge badge-resolution">{{ humanTime(doc._source.duration) }}</span>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-if="doc._props.isImage && !hover && doc._props.tnW / doc._props.tnH < 5"
|
||||
class="card-img-overlay"
|
||||
:class="{'small-badge': smallBadge}">
|
||||
<span class="badge badge-resolution">{{ `${doc._source.width}x${doc._source.height}` }}</span>
|
||||
</div>
|
||||
|
||||
<div v-if="(doc._props.isVideo || doc._props.isGif) && doc._source.duration > 0 && !hover"
|
||||
class="card-img-overlay"
|
||||
:class="{'small-badge': smallBadge}">
|
||||
<span class="badge badge-resolution">{{ humanTime(doc._source.duration) }}</span>
|
||||
</div>
|
||||
|
||||
<div v-if="doc._props.isPlayableVideo" class="play">
|
||||
<svg viewBox="0 0 494.942 494.942" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="m35.353 0 424.236 247.471-424.236 247.471z"/>
|
||||
</svg>
|
||||
</div>
|
||||
|
||||
<img ref="tn"
|
||||
v-if="doc._props.isPlayableImage || doc._props.isPlayableVideo"
|
||||
:src="tnSrc"
|
||||
alt=""
|
||||
:style="{height: (doc._props.isGif && hover) ? `${tnHeight()}px` : undefined}"
|
||||
class="pointer fit card-img-top" @click="onThumbnailClick()">
|
||||
<img v-else :src="tnSrc" alt=""
|
||||
class="fit card-img-top">
|
||||
|
||||
<ThumbnailProgressBar v-if="hover && doc._props.hasVidPreview"
|
||||
:progress="(currentThumbnailNum + 1) / (doc._props.tnNum)"
|
||||
></ThumbnailProgressBar>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import {humanTime} from "@/util";
|
||||
import ThumbnailProgressBar from "@/components/ThumbnailProgressBar";
|
||||
|
||||
export default {
|
||||
name: "FullThumbnail",
|
||||
props: ["doc", "smallBadge"],
|
||||
components: {ThumbnailProgressBar},
|
||||
data() {
|
||||
return {
|
||||
hover: false,
|
||||
currentThumbnailNum: 0,
|
||||
timeoutId: null
|
||||
}
|
||||
},
|
||||
created() {
|
||||
this.$store.subscribe((mutation) => {
|
||||
if (mutation.type === "busTnTouchStart" && mutation.payload !== this.doc._id) {
|
||||
this.onTnLeave();
|
||||
}
|
||||
});
|
||||
},
|
||||
computed: {
|
||||
tnSrc() {
|
||||
const doc = this.doc;
|
||||
const props = doc._props;
|
||||
if (props.isGif && this.hover) {
|
||||
return `f/${doc._id}`;
|
||||
}
|
||||
return (this.currentThumbnailNum === 0)
|
||||
? `t/${doc._source.index}/${doc._id}`
|
||||
: `t/${doc._source.index}/${doc._id}${String(this.currentThumbnailNum).padStart(4, "0")}`;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
humanTime: humanTime,
|
||||
onThumbnailClick() {
|
||||
this.$emit("onThumbnailClick");
|
||||
},
|
||||
tnHeight() {
|
||||
return this.$refs.tn.height;
|
||||
},
|
||||
tnWidth() {
|
||||
return this.$refs.tn.width;
|
||||
},
|
||||
onTnEnter() {
|
||||
this.hover = true;
|
||||
if (this.doc._props.hasVidPreview) {
|
||||
this.currentThumbnailNum += 1;
|
||||
this.scheduleNextTnNum();
|
||||
}
|
||||
},
|
||||
onTnLeave() {
|
||||
this.currentThumbnailNum = 0;
|
||||
this.hover = false;
|
||||
if (this.timeoutId !== null) {
|
||||
window.clearTimeout(this.timeoutId);
|
||||
this.timeoutId = null;
|
||||
}
|
||||
},
|
||||
scheduleNextTnNum() {
|
||||
const INTERVAL = this.$store.state.optVidPreviewInterval ?? 700;
|
||||
this.timeoutId = window.setTimeout(() => {
|
||||
if (!this.hover) {
|
||||
return;
|
||||
}
|
||||
this.scheduleNextTnNum();
|
||||
if (this.currentThumbnailNum === this.doc._props.tnNum - 1) {
|
||||
this.currentThumbnailNum = 0;
|
||||
} else {
|
||||
this.currentThumbnailNum += 1;
|
||||
}
|
||||
}, INTERVAL);
|
||||
},
|
||||
onTouchStart() {
|
||||
this.$store.commit("busTnTouchStart", this.doc._id);
|
||||
if (!this.hover) {
|
||||
this.onTnEnter()
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.img-wrapper {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.img-wrapper:hover svg {
|
||||
fill: rgba(0, 0, 0, 1);
|
||||
}
|
||||
|
||||
.card-img-top {
|
||||
border-top-left-radius: 0;
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
|
||||
.play {
|
||||
position: absolute;
|
||||
width: 25px;
|
||||
height: 25px;
|
||||
left: 50%;
|
||||
top: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.play svg {
|
||||
fill: rgba(0, 0, 0, 0.7);
|
||||
}
|
||||
|
||||
.badge-resolution {
|
||||
color: #212529;
|
||||
background-color: #FFC107;
|
||||
}
|
||||
|
||||
.card-img-overlay {
|
||||
pointer-events: none;
|
||||
padding: 0.75rem;
|
||||
bottom: unset;
|
||||
top: 0;
|
||||
left: unset;
|
||||
right: unset;
|
||||
}
|
||||
|
||||
.small-badge {
|
||||
padding: 1px 3px;
|
||||
font-size: 70%;
|
||||
}
|
||||
|
||||
</style>
|
||||
@@ -7,13 +7,29 @@
|
||||
value-field="id"></b-form-select>
|
||||
</div>
|
||||
<div v-else>
|
||||
<b-list-group id="index-picker-desktop">
|
||||
|
||||
<div class="d-flex justify-content-between align-content-center">
|
||||
<span>
|
||||
{{ selectedIndices.length }}
|
||||
{{ selectedIndices.length === 1 ? $t("indexPicker.selectedIndex") : $t("indexPicker.selectedIndices") }}
|
||||
</span>
|
||||
|
||||
<div>
|
||||
<b-button variant="link" @click="selectAll()"> {{ $t("indexPicker.selectAll") }}</b-button>
|
||||
<b-button variant="link" @click="selectNone()"> {{ $t("indexPicker.selectNone") }}</b-button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<b-list-group id="index-picker-desktop" class="unselectable">
|
||||
<b-list-group-item
|
||||
v-for="idx in indices"
|
||||
@click="toggleIndex(idx)"
|
||||
class="d-flex justify-content-between align-items-center list-group-item-action pointer">
|
||||
@click="toggleIndex(idx, $event)"
|
||||
@click.shift="shiftClick(idx, $event)"
|
||||
class="d-flex justify-content-between align-items-center list-group-item-action pointer"
|
||||
:class="{active: lastClickIndex === idx}"
|
||||
>
|
||||
<div class="d-flex">
|
||||
<b-checkbox @change="toggleIndex(idx)" :checked="isSelected(idx)"></b-checkbox>
|
||||
<b-checkbox style="pointer-events: none" :checked="isSelected(idx)"></b-checkbox>
|
||||
{{ idx.name }}
|
||||
<span class="text-muted timestamp-text ml-2">{{ formatIdxDate(idx.timestamp) }}</span>
|
||||
</div>
|
||||
@@ -36,6 +52,7 @@ export default Vue.extend({
|
||||
data() {
|
||||
return {
|
||||
loading: true,
|
||||
lastClickIndex: null
|
||||
}
|
||||
},
|
||||
computed: {
|
||||
@@ -53,13 +70,50 @@ export default Vue.extend({
|
||||
...mapActions({
|
||||
setSelectedIndices: "setSelectedIndices"
|
||||
}),
|
||||
shiftClick(index, e) {
|
||||
if (this.lastClickIndex === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const select = this.isSelected(this.lastClickIndex);
|
||||
|
||||
let leftBoundary = this.indices.indexOf(this.lastClickIndex);
|
||||
let rightBoundary = this.indices.indexOf(index);
|
||||
|
||||
if (rightBoundary < leftBoundary) {
|
||||
let tmp = leftBoundary;
|
||||
leftBoundary = rightBoundary;
|
||||
rightBoundary = tmp;
|
||||
}
|
||||
|
||||
for (let i = leftBoundary; i <= rightBoundary; i++) {
|
||||
if (select) {
|
||||
if (!this.isSelected(this.indices[i])) {
|
||||
this.setSelectedIndices([this.indices[i], ...this.selectedIndices]);
|
||||
}
|
||||
} else {
|
||||
this.setSelectedIndices(this.selectedIndices.filter(idx => idx !== this.indices[i]));
|
||||
}
|
||||
}
|
||||
},
|
||||
selectAll() {
|
||||
this.setSelectedIndices(this.indices);
|
||||
},
|
||||
selectNone() {
|
||||
this.setSelectedIndices([]);
|
||||
},
|
||||
onSelect(value) {
|
||||
this.setSelectedIndices(this.indices.filter(idx => value.includes(idx.id)));
|
||||
},
|
||||
formatIdxDate(timestamp: number): string {
|
||||
return format(new Date(timestamp * 1000), "yyyy-MM-dd");
|
||||
},
|
||||
toggleIndex(index) {
|
||||
toggleIndex(index, e) {
|
||||
if (e.shiftKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.lastClickIndex = index;
|
||||
if (this.isSelected(index)) {
|
||||
this.setSelectedIndices(this.selectedIndices.filter(idx => idx.id != index.id));
|
||||
} else {
|
||||
@@ -79,6 +133,11 @@ export default Vue.extend({
|
||||
font-size: 80%;
|
||||
}
|
||||
|
||||
.theme-black .version-badge {
|
||||
color: #eee !important;
|
||||
background: none;
|
||||
}
|
||||
|
||||
.version-badge {
|
||||
color: #222 !important;
|
||||
background: none;
|
||||
@@ -92,4 +151,41 @@ export default Vue.extend({
|
||||
overflow-y: auto;
|
||||
max-height: 132px;
|
||||
}
|
||||
|
||||
.btn-link:focus {
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
.unselectable {
|
||||
user-select: none;
|
||||
-ms-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
}
|
||||
|
||||
.list-group-item.active {
|
||||
z-index: 2;
|
||||
background-color: inherit;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
.theme-black .list-group-item {
|
||||
border: 1px solid rgba(255,255,255, 0.1);
|
||||
}
|
||||
|
||||
.theme-black .list-group-item:first-child {
|
||||
border: 1px solid rgba(255,255,255, 0.05);
|
||||
}
|
||||
|
||||
.theme-black .list-group-item.active {
|
||||
z-index: 2;
|
||||
background-color: inherit;
|
||||
color: inherit;
|
||||
border: 1px solid rgba(255,255,255, 0.3);
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.theme-black .list-group {
|
||||
border-radius: 0;
|
||||
}
|
||||
</style>
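The IndexPicker hunk above adds shift-click range selection: the last clicked index is the anchor, the two positions are swapped if needed, and every index in between is selected or deselected to match the anchor's state. A minimal sketch of that range logic, with a generic items array and selected set standing in for the Vuex-backed index list:

// Sketch of shift-click range selection, mirroring the shiftClick() method above.
// items and selected are illustrative; sist2 keeps the index list in Vuex.
function applyShiftClick<T>(items: T[], selected: Set<T>, anchor: T, clicked: T): void {
    let left = items.indexOf(anchor);
    let right = items.indexOf(clicked);
    if (left === -1 || right === -1) {
        return;
    }
    if (right < left) {
        [left, right] = [right, left];
    }
    // The anchor's current state decides whether the whole range is selected or cleared.
    const select = selected.has(anchor);
    for (let i = left; i <= right; i++) {
        if (select) {
            selected.add(items[i]);
        } else {
            selected.delete(items[i]);
        }
    }
}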
@@ -1,6 +1,5 @@
<template>
  <b-table :items="tableItems" small borderless responsive="md" thead-class="hidden" class="mb-0 mt-4">

    <template #cell(value)="data">
      <span v-if="'html' in data.item" v-html="data.item.html"></span>
      <span v-else>{{ data.value }}</span>
@@ -33,12 +32,18 @@ function dmsToDecimal(dms, ref) {
export default {
  name: "InfoTable",
  props: ["doc"],
  data() {
    return {
      indexName: "loading..."
    }
  },
  computed: {
    tableItems() {
      this.indexName;
      const src = this.doc._source;

      const items = [
        {key: "index", value: `[${this.$store.getters.indexMap[src.index].name}]`},
        {key: "index", value: `[${this.indexName}]`},
        {key: "mtime", value: humanDate(src.mtime)},
        {key: "mime", value: src.mime},
        {key: "size", value: humanFileSize(src.size)},
@@ -85,7 +90,16 @@ export default {

      return items;
    }
  },
  mounted() {
    if (this.$store.getters.indexMap[this.doc.index]) {
      this.indexName = this.$store.getters.indexMap[this.doc._source.index].name
    }

    window.setTimeout(() => {
      this.indexName = this.$store.getters.indexMap[this.doc._source.index].name
    }, 500)
  },
}
</script>


@@ -1,6 +1,5 @@
<template>
  <div>
    <!-- TODO: Set slideshowTime as a configurable option-->
    <FsLightbox
      :key="lightboxKey"
      :toggler="showLightbox"
@@ -10,7 +9,7 @@
      :types="lightboxTypes"
      :source-index="lightboxSlide"
      :custom-toolbar-buttons="customButtons"
      :slideshow-time="1000 * 10"
      :slideshow-time="$store.getters.optLightboxSlideDuration * 1000"
      :zoom-increment="0.5"
      :load-only-current-source="$store.getters.optLightboxLoadOnlyCurrent"
      :on-close="onClose"

@@ -3,7 +3,7 @@
<p>
  <b>{{
    `[${$store.getters.indices.find(i => i.id === hit._source.index).name}]`
  }}</b>{{ `/${hit._source.path}/${hit._source.name}${ext(hit)}` }}
  }}</b>{{ `${hit._source.path === '' ? '' : '/'}${hit._source.path}/${hit._source.name}${ext(hit)}` }}
</p>
<p style="margin-top: -1em">
  <span v-if="hit._source.width">{{ `${hit._source.width}x${hit._source.height}`}}</span>
@@ -7,28 +7,103 @@ import InspireTree from "inspire-tree";
|
||||
import InspireTreeDOM from "inspire-tree-dom";
|
||||
|
||||
import "inspire-tree-dom/dist/inspire-tree-light.min.css";
|
||||
import {getSelectedTreeNodes} from "@/util";
|
||||
import {getSelectedTreeNodes, getTreeNodeAttributes} from "@/util";
|
||||
import Sist2Api from "@/Sist2Api";
|
||||
import Sist2Query from "@/Sist2Query";
|
||||
|
||||
export default {
|
||||
name: "MimePicker",
|
||||
data() {
|
||||
return {
|
||||
mimeTree: null,
|
||||
stashedMimeTreeAttributes: null,
|
||||
updateBusy: false
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.$store.subscribe((mutation) => {
|
||||
if (mutation.type === "setUiMimeMap") {
|
||||
const mimeMap = mutation.payload.slice();
|
||||
if (mutation.type === "setUiMimeMap" && this.mimeTree === null) {
|
||||
this.initializeTree();
|
||||
} else if (mutation.type === "busSearch") {
|
||||
this.updateTree();
|
||||
}
|
||||
});
|
||||
},
|
||||
methods: {
|
||||
handleTreeClick(node, e) {
|
||||
if (e === "indeterminate" || e === "collapsed" || e === 'rendered' || e === "focused") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.updateBusy) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.$store.commit("setSelectedMimeTypes", getSelectedTreeNodes(this.mimeTree));
|
||||
},
|
||||
updateTree() {
|
||||
|
||||
if (this.$store.getters.optUpdateMimeMap === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.updateBusy) {
|
||||
return
|
||||
}
|
||||
this.updateBusy = true;
|
||||
|
||||
if (this.stashedMimeTreeAttributes === null) {
|
||||
this.stashedMimeTreeAttributes = getTreeNodeAttributes(this.mimeTree);
|
||||
}
|
||||
|
||||
const query = Sist2Query.searchQuery();
|
||||
|
||||
Sist2Api.getMimeTypes(query).then(({buckets, mimeMap}) => {
|
||||
this.$store.commit("setUiMimeMap", mimeMap);
|
||||
this.$store.commit("setUiDetailsMimeAgg", buckets);
|
||||
|
||||
this.mimeTree.removeAll();
|
||||
this.mimeTree.addNodes(mimeMap);
|
||||
|
||||
// Restore selected mimes
|
||||
if (this.stashedMimeTreeAttributes === null) {
|
||||
// NOTE: This happens when successive fast searches are triggered
|
||||
this.stashedMimeTreeAttributes = {};
|
||||
// Always add the selected mime types
|
||||
this.$store.state.selectedMimeTypes.forEach(mime => {
|
||||
this.stashedMimeTreeAttributes[mime] = {
|
||||
checked: true
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Object.entries(this.stashedMimeTreeAttributes).forEach(([mime, attributes]) => {
|
||||
if (this.mimeTree.node(mime)) {
|
||||
if (attributes.checked) {
|
||||
this.mimeTree.node(mime).select();
|
||||
}
|
||||
if (attributes.collapsed === false) {
|
||||
this.mimeTree.node(mime).expand();
|
||||
}
|
||||
}
|
||||
});
|
||||
this.stashedMimeTreeAttributes = null;
|
||||
this.updateBusy = false;
|
||||
});
|
||||
},
|
||||
|
||||
initializeTree() {
|
||||
const mimeMap = this.$store.state.uiMimeMap;
|
||||
|
||||
this.mimeTree = new InspireTree({
|
||||
selection: {
|
||||
mode: 'checkbox'
|
||||
mode: "checkbox"
|
||||
},
|
||||
data: mimeMap
|
||||
});
|
||||
|
||||
new InspireTreeDOM(this.mimeTree, {
|
||||
target: '#mimeTree'
|
||||
target: "#mimeTree"
|
||||
});
|
||||
this.mimeTree.on("node.state.changed", this.handleTreeClick);
|
||||
this.mimeTree.deselect();
|
||||
@@ -39,16 +114,6 @@ export default {
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
methods: {
|
||||
handleTreeClick(node, e) {
|
||||
if (e === "indeterminate" || e === "collapsed" || e === 'rendered' || e === "focused") {
|
||||
return;
|
||||
}
|
||||
|
||||
this.$store.commit("setSelectedMimeTypes", getSelectedTreeNodes(this.mimeTree));
|
||||
},
|
||||
}
|
||||
}
|
||||
</script>
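The MimePicker change rebuilds the media-type tree after each search and then restores the previously checked and expanded nodes from a stashed attribute map, so a refresh does not lose the user's selection. A reduced sketch of that stash-and-restore idea; the TreeNode shape is illustrative and not the InspireTree API:

// Generic stash-and-restore of per-node UI state, as done for the mime tree above.
// The TreeNode shape is illustrative; sist2 reads the same fields from InspireTree nodes.
interface TreeNode {
    id: string;
    checked: boolean;
    collapsed: boolean;
}

type NodeState = Record<string, { checked: boolean; collapsed: boolean }>;

function stashState(nodes: TreeNode[]): NodeState {
    const state: NodeState = {};
    nodes.forEach(n => {
        state[n.id] = {checked: n.checked, collapsed: n.collapsed};
    });
    return state;
}

function restoreState(nodes: TreeNode[], state: NodeState): void {
    nodes.forEach(n => {
        const saved = state[n.id];
        if (saved) {
            n.checked = saved.checked;
            n.collapsed = saved.collapsed;
        }
    });
}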
@@ -8,7 +8,8 @@
|
||||
</b-navbar-brand>
|
||||
|
||||
<span class="badge badge-pill version" v-if="$store && $store.state.sist2Info">
|
||||
v{{ sist2Version() }}<span v-if="isDebug()">-dbg</span>
|
||||
v{{ sist2Version() }}<span v-if="isDebug()">-dbg</span><span v-if="isLegacy() && !hideLegacy()">-<a
|
||||
href="https://github.com/simon987/sist2/blob/master/docs/USAGE.md#elasticsearch" target="_blank">legacyES</a></span>
|
||||
</span>
|
||||
|
||||
<span v-if="$store && $store.state.sist2Info" class="tagline" v-html="tagline()"></span>
|
||||
@@ -19,7 +20,8 @@
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Sist2Icon from "@/components/Sist2Icon";
|
||||
import Sist2Icon from "@/components/icons/Sist2Icon";
|
||||
|
||||
export default {
|
||||
name: "NavBar",
|
||||
components: {Sist2Icon},
|
||||
@@ -32,6 +34,12 @@ export default {
|
||||
},
|
||||
isDebug() {
|
||||
return this.$store.state.sist2Info.debug;
|
||||
},
|
||||
isLegacy() {
|
||||
return this.$store.state.sist2Info.esVersionLegacy;
|
||||
},
|
||||
hideLegacy() {
|
||||
return this.$store.state.optHideLegacy;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,31 +3,56 @@
|
||||
<span>{{ hitCount }} {{ hitCount === 1 ? $t("hit") : $t("hits") }}</span>
|
||||
|
||||
<div style="float: right">
|
||||
<b-button v-b-toggle.collapse-1 variant="primary" class="not-mobile">{{ $t("details") }}</b-button>
|
||||
<b-button v-b-toggle.collapse-1 variant="primary" class="not-mobile" @click="onToggle()">{{
|
||||
$t("details")
|
||||
}}
|
||||
</b-button>
|
||||
|
||||
<template v-if="hitCount !== 0">
|
||||
<SortSelect class="ml-2"></SortSelect>
|
||||
|
||||
<DisplayModeToggle class="ml-2"></DisplayModeToggle>
|
||||
</template>
|
||||
</div>
|
||||
|
||||
<b-collapse id="collapse-1" class="pt-2" style="clear:both;">
|
||||
<b-card>
|
||||
<b-table :items="tableItems" small borderless thead-class="hidden" class="mb-0"></b-table>
|
||||
<b-table :items="tableItems" small borderless bordered thead-class="hidden" class="mb-0"></b-table>
|
||||
|
||||
<br/>
|
||||
<h4>
|
||||
{{$t("mimeTypes")}}
|
||||
<b-button size="sm" variant="primary" class="float-right" @click="onCopyClick"><ClipboardIcon/></b-button>
|
||||
</h4>
|
||||
<Preloader v-if="$store.state.uiDetailsMimeAgg == null"></Preloader>
|
||||
<b-table
|
||||
v-else
|
||||
sort-by="doc_count"
|
||||
:sort-desc="true"
|
||||
thead-class="hidden"
|
||||
:items="$store.state.uiDetailsMimeAgg" small bordered class="mb-0"
|
||||
></b-table>
|
||||
</b-card>
|
||||
</b-collapse>
|
||||
</b-card>
|
||||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
import {EsResult} from "@/Sist2Api";
|
||||
import Sist2Api, {EsResult} from "@/Sist2Api";
|
||||
import Vue from "vue";
|
||||
import {humanFileSize, humanTime} from "@/util";
|
||||
import {humanFileSize} from "@/util";
|
||||
import DisplayModeToggle from "@/components/DisplayModeToggle.vue";
|
||||
import SortSelect from "@/components/SortSelect.vue";
|
||||
import Preloader from "@/components/Preloader.vue";
|
||||
import Sist2Query from "@/Sist2Query";
|
||||
import ClipboardIcon from "@/components/icons/ClipboardIcon.vue";
|
||||
|
||||
export default Vue.extend({
|
||||
name: "ResultsCard",
|
||||
components: {SortSelect, DisplayModeToggle},
|
||||
components: {ClipboardIcon, Preloader, SortSelect, DisplayModeToggle},
|
||||
created() {
|
||||
|
||||
},
|
||||
computed: {
|
||||
lastResultsLoaded() {
|
||||
return this.$store.state.lastQueryResults != null;
|
||||
@@ -52,6 +77,39 @@ export default Vue.extend({
|
||||
totalSize() {
|
||||
return humanFileSize((this.$store.state.lastQueryResults as EsResult).aggregations.total_size.value);
|
||||
},
|
||||
onToggle() {
|
||||
const show = !document.getElementById("collapse-1").classList.contains("show");
|
||||
this.$store.commit("setUiShowDetails", show);
|
||||
|
||||
if (show && this.$store.state.uiDetailsMimeAgg == null && !this.$store.state.optUpdateMimeMap) {
|
||||
// Mime aggs are not updated automatically, update now
|
||||
this.forceUpdateMimeAgg();
|
||||
}
|
||||
},
|
||||
onCopyClick() {
|
||||
let tsvString = "";
|
||||
this.$store.state.uiDetailsMimeAgg.slice().sort((a,b) => b["doc_count"] - a["doc_count"]).forEach(row => {
|
||||
tsvString += `${row["key"]}\t${row["doc_count"]}\n`;
|
||||
});
|
||||
|
||||
navigator.clipboard.writeText(tsvString);
|
||||
|
||||
this.$bvToast.toast(
|
||||
this.$t("toast.copiedToClipboard"),
|
||||
{
|
||||
title: null,
|
||||
noAutoHide: false,
|
||||
toaster: "b-toaster-bottom-right",
|
||||
headerClass: "hidden",
|
||||
bodyClass: "toast-body-info",
|
||||
});
|
||||
},
|
||||
forceUpdateMimeAgg() {
|
||||
const query = Sist2Query.searchQuery();
|
||||
Sist2Api.getMimeTypes(query).then(({buckets}) => {
|
||||
this.$store.commit("setUiDetailsMimeAgg", buckets);
|
||||
});
|
||||
}
|
||||
},
|
||||
});
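onCopyClick above flattens the per-media-type aggregation into tab-separated rows, sorted by descending count, before writing it to the clipboard. A small sketch of that export step (the MimeBucket shape mirrors the {key, doc_count} rows returned by the aggregation):

// Sketch of the TSV export performed by onCopyClick above.
// MimeBucket mirrors the {key, doc_count} aggregation rows; the name is illustrative.
interface MimeBucket {
    key: string;
    doc_count: number;
}

function bucketsToTsv(buckets: MimeBucket[]): string {
    return buckets
        .slice()
        .sort((a, b) => b.doc_count - a.doc_count)
        .map(row => `${row.key}\t${row.doc_count}`)
        .join("\n") + "\n";
}

// navigator.clipboard.writeText(bucketsToTsv(buckets)) then copies one
// "mime<TAB>count" row per line, largest counts first.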
@@ -19,6 +19,14 @@
  {{ $t("sort.sizeDesc") }}
</b-dropdown-item>

<b-dropdown-item :class="{'dropdown-active': sort === 'nameDesc'}" @click="onSelect('nameDesc')">
  {{ $t("sort.nameDesc") }}
</b-dropdown-item>

<b-dropdown-item :class="{'dropdown-active': sort === 'nameAsc'}" @click="onSelect('nameAsc')">
  {{ $t("sort.nameAsc") }}
</b-dropdown-item>

<b-dropdown-item :class="{'dropdown-active': sort === 'random'}" @click="onSelect('random')">
  {{ $t("sort.random") }}
</b-dropdown-item>

@@ -51,7 +51,7 @@
|
||||
>{{ tag.text.split(".").pop() }}</span>
|
||||
|
||||
<b-popover :target="hit._id+tag.rawText" triggers="focus blur" placement="top">
|
||||
<b-button variant="danger" @click="onTagDeleteClick(tag, $event)">Delete</b-button>
|
||||
<b-button variant="danger" @click="onTagDeleteClick(tag, $event)">{{$t("deleteTag")}}</b-button>
|
||||
</b-popover>
|
||||
</div>
|
||||
|
||||
@@ -63,7 +63,7 @@
|
||||
</template>
|
||||
|
||||
<!-- Add button -->
|
||||
<small v-if="showAddButton" class="badge add-tag-button" @click="tagAdd()">Add</small>
|
||||
<small v-if="showAddButton" class="badge add-tag-button" @click="tagAdd()">{{$t("addTag")}}</small>
|
||||
|
||||
<!-- Size tag-->
|
||||
<small v-else class="text-muted badge-size">{{
|
||||
|
||||
@@ -120,7 +120,7 @@ export default {
|
||||
},
|
||||
mounted() {
|
||||
this.$store.subscribe((mutation) => {
|
||||
if (mutation.type === "setUiMimeMap") {
|
||||
if (mutation.type === "setUiMimeMap" && this.tagTree === null) {
|
||||
this.initializeTree();
|
||||
this.updateTree();
|
||||
} else if (mutation.type === "busUpdateTags") {
|
||||
@@ -147,6 +147,7 @@ export default {
|
||||
this.tagTree.on("node.state.changed", this.handleTreeClick);
|
||||
},
|
||||
updateTree() {
|
||||
// TODO: remember which tags are selected and restore?
|
||||
const tagMap = [];
|
||||
Sist2Api.getTags().then(tags => {
|
||||
tags.forEach(tag => addTag(tagMap, tag.id, tag.id, tag.count));
|
||||
|
||||
40  sist2-vue/src/components/ThumbnailProgressBar.vue  Normal file
@@ -0,0 +1,40 @@
<template>
  <div class="thumbnail-progress-bar" :style="{width: `${percentProgress}%`}"></div>
</template>

<script>
export default {
  name: "ThumbnailProgressBar",
  props: ["doc", "progress"],
  computed: {
    percentProgress() {
      return Math.min(Math.max(this.progress * 100, 0), 100);
    }
  }
}
</script>

<style scoped>

.thumbnail-progress-bar {
  position: absolute;
  left: 0;
  bottom: 0;

  height: 4px;
  background: #2196f3AA;

  z-index: 9;
}

.theme-black .thumbnail-progress-bar {
  background: rgba(0, 188, 212, 0.95);
}

.sub-document .thumbnail-progress-bar {
  max-width: calc(100% - 8px);
  left: 4px;
}

</style>
21  sist2-vue/src/components/icons/ClipboardIcon.vue  Normal file
@@ -0,0 +1,21 @@
<template>
  <svg style="width:24px;height:24px" viewBox="0 0 24 24">
    <path
      fill="currentColor"
      d="M17,9H7V7H17M17,13H7V11H17M14,17H7V15H14M12,3A1,1 0 0,1 13,4A1,1 0 0,1 12,5A1,1 0 0,1 11,4A1,1 0 0,1 12,3M19,3H14.82C14.4,1.84 13.3,1 12,1C10.7,1 9.6,1.84 9.18,3H5A2,2 0 0,0 3,5V19A2,2 0 0,0 5,21H19A2,2 0 0,0 21,19V5A2,2 0 0,0 19,3Z"/>
  </svg>
</template>

<script>
export default {
  name: "ClipboardIcon"
}
</script>

<style scoped>
svg {
  display: inline-block;
  width: 20px;
  height: 20px;
}
</style>
21  sist2-vue/src/components/icons/LanguageIcon.vue  Normal file
@@ -0,0 +1,21 @@
<template>
  <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
    <path
      fill="currentColor"
d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm1 16.057v-3.057h2.994c-.059 1.143-.212 2.24-.456 3.279-.823-.12-1.674-.188-2.538-.222zm1.957 2.162c-.499 1.33-1.159 2.497-1.957 3.456v-3.62c.666.028 1.319.081 1.957.164zm-1.957-7.219v-3.015c.868-.034 1.721-.103 2.548-.224.238 1.027.389 2.111.446 3.239h-2.994zm0-5.014v-3.661c.806.969 1.471 2.15 1.971 3.496-.642.084-1.3.137-1.971.165zm2.703-3.267c1.237.496 2.354 1.228 3.29 2.146-.642.234-1.311.442-2.019.607-.344-.992-.775-1.91-1.271-2.753zm-7.241 13.56c-.244-1.039-.398-2.136-.456-3.279h2.994v3.057c-.865.034-1.714.102-2.538.222zm2.538 1.776v3.62c-.798-.959-1.458-2.126-1.957-3.456.638-.083 1.291-.136 1.957-.164zm-2.994-7.055c.057-1.128.207-2.212.446-3.239.827.121 1.68.19 2.548.224v3.015h-2.994zm1.024-5.179c.5-1.346 1.165-2.527 1.97-3.496v3.661c-.671-.028-1.329-.081-1.97-.165zm-2.005-.35c-.708-.165-1.377-.373-2.018-.607.937-.918 2.053-1.65 3.29-2.146-.496.844-.927 1.762-1.272 2.753zm-.549 1.918c-.264 1.151-.434 2.36-.492 3.611h-3.933c.165-1.658.739-3.197 1.617-4.518.88.361 1.816.67 2.808.907zm.009 9.262c-.988.236-1.92.542-2.797.9-.89-1.328-1.471-2.879-1.637-4.551h3.934c.058 1.265.231 2.488.5 3.651zm.553 1.917c.342.976.768 1.881 1.257 2.712-1.223-.49-2.326-1.211-3.256-2.115.636-.229 1.299-.435 1.999-.597zm9.924 0c.7.163 1.362.367 1.999.597-.931.903-2.034 1.625-3.257 2.116.489-.832.915-1.737 1.258-2.713zm.553-1.917c.27-1.163.442-2.386.501-3.651h3.934c-.167 1.672-.748 3.223-1.638 4.551-.877-.358-1.81-.664-2.797-.9zm.501-5.651c-.058-1.251-.229-2.46-.492-3.611.992-.237 1.929-.546 2.809-.907.877 1.321 1.451 2.86 1.616 4.518h-3.933z"/>
  </svg>
</template>

<script>
export default {
  name: "LanguageIcon"
}
</script>

<style scoped>
svg {
  display: inline-block;
  width: 20px;
  height: 20px;
}
</style>
@@ -1,10 +1,15 @@
|
||||
export default {
|
||||
en: {
|
||||
filePage: {
|
||||
notFound: "Not found"
|
||||
},
|
||||
searchBar: {
|
||||
simple: "Search",
|
||||
advanced: "Advanced search",
|
||||
fuzzy: "Fuzzy"
|
||||
},
|
||||
addTag: "Add",
|
||||
deleteTag: "Delete",
|
||||
download: "Download",
|
||||
and: "and",
|
||||
page: "page",
|
||||
@@ -63,7 +68,11 @@ export default {
|
||||
slideDuration: "Slide duration",
|
||||
resultSize: "Number of results per page",
|
||||
tagOrOperator: "Use OR operator when specifying multiple tags.",
|
||||
hideDuplicates: "Hide duplicate results based on checksum"
|
||||
hideDuplicates: "Hide duplicate results based on checksum",
|
||||
hideLegacy: "Hide the 'legacyES' Elasticsearch notice",
|
||||
updateMimeMap: "Update the Media Types tree in real time",
|
||||
useDatePicker: "Use a Date Picker component rather than a slider",
|
||||
vidPreviewInterval: "Video preview frame duration in ms"
|
||||
},
|
||||
queryMode: {
|
||||
simple: "Simple",
|
||||
@@ -71,7 +80,8 @@ export default {
|
||||
},
|
||||
lang: {
|
||||
en: "English",
|
||||
fr: "Français"
|
||||
fr: "Français",
|
||||
"zh-CN": "简体中文",
|
||||
},
|
||||
displayMode: {
|
||||
grid: "Grid",
|
||||
@@ -125,18 +135,21 @@ export default {
|
||||
esQueryErr: "Could not parse or execute query, please check the Advanced search documentation. " +
|
||||
"See server logs for more information.",
|
||||
dupeTagTitle: "Duplicate tag",
|
||||
dupeTag: "This tag already exists for this document."
|
||||
dupeTag: "This tag already exists for this document.",
|
||||
copiedToClipboard: "Copied to clipboard"
|
||||
},
|
||||
saveTagModalTitle: "Add tag",
|
||||
saveTagPlaceholder: "Tag name",
|
||||
confirm: "Confirm",
|
||||
indexPickerPlaceholder: "Select indices",
|
||||
indexPickerPlaceholder: "Select an index",
|
||||
sort: {
|
||||
relevance: "Relevance",
|
||||
dateAsc: "Date (Older first)",
|
||||
dateDesc: "Date (Newer first)",
|
||||
sizeAsc: "Size (Smaller first)",
|
||||
sizeDesc: "Size (Larger first)",
|
||||
nameAsc: "Name (A-z)",
|
||||
nameDesc: "Name (Z-a)",
|
||||
random: "Random",
|
||||
},
|
||||
d3: {
|
||||
@@ -144,14 +157,25 @@ export default {
|
||||
mimeSize: "Size distribution by media type",
|
||||
dateHistogram: "File modification time distribution",
|
||||
sizeHistogram: "File size distribution",
|
||||
}
|
||||
},
|
||||
indexPicker: {
|
||||
selectNone: "Select None",
|
||||
selectAll: "Select All",
|
||||
selectedIndex: "selected index",
|
||||
selectedIndices: "selected indices",
|
||||
},
|
||||
},
|
||||
fr: {
|
||||
filePage: {
|
||||
notFound: "Ficher introuvable"
|
||||
},
|
||||
searchBar: {
|
||||
simple: "Recherche",
|
||||
advanced: "Recherche avancée",
|
||||
fuzzy: "Approximatif"
|
||||
},
|
||||
addTag: "Ajouter",
|
||||
deleteTag: "Supprimer",
|
||||
download: "Télécharger",
|
||||
and: "et",
|
||||
page: "page",
|
||||
@@ -211,7 +235,11 @@ export default {
|
||||
slideDuration: "Durée des diapositives",
|
||||
resultSize: "Nombre de résultats par page",
|
||||
tagOrOperator: "Utiliser l'opérateur OU lors de la spécification de plusieurs tags",
|
||||
hideDuplicates: "Masquer les résultats en double"
|
||||
hideDuplicates: "Masquer les résultats en double",
|
||||
hideLegacy: "Masquer la notice 'legacyES' Elasticsearch",
|
||||
updateMimeMap: "Mettre à jour l'arbre de Types de médias en temps réel",
|
||||
useDatePicker: "Afficher un composant « Date Picker » plutôt qu'un slider",
|
||||
vidPreviewInterval: "Durée des images d'aperçu video en millisecondes"
|
||||
},
|
||||
queryMode: {
|
||||
simple: "Simple",
|
||||
@@ -219,7 +247,8 @@ export default {
|
||||
},
|
||||
lang: {
|
||||
en: "English",
|
||||
fr: "Français"
|
||||
fr: "Français",
|
||||
"zh-CN": "简体中文",
|
||||
},
|
||||
displayMode: {
|
||||
grid: "Grille",
|
||||
@@ -274,7 +303,8 @@ export default {
|
||||
esQueryErr: "Impossible d'analyser ou d'exécuter la requête, veuillez consulter la documentation sur la " +
|
||||
"recherche avancée. Voir les journaux du serveur pour plus d'informations.",
|
||||
dupeTagTitle: "Tag en double",
|
||||
dupeTag: "Ce tag existe déjà pour ce document."
|
||||
dupeTag: "Ce tag existe déjà pour ce document.",
|
||||
copiedToClipboard: "Copié dans le presse-papier"
|
||||
},
|
||||
saveTagModalTitle: "Ajouter un tag",
|
||||
saveTagPlaceholder: "Nom du tag",
|
||||
@@ -286,6 +316,8 @@ export default {
|
||||
dateDesc: "Date (Plus récent)",
|
||||
sizeAsc: "Taille (Plus petit)",
|
||||
sizeDesc: "Taille (Plus grand)",
|
||||
nameAsc: "Nom (A-z)",
|
||||
nameDesc: "Nom (Z-a)",
|
||||
random: "Aléatoire",
|
||||
},
|
||||
d3: {
|
||||
@@ -293,6 +325,178 @@ export default {
|
||||
mimeSize: "Distribution des tailles de fichiers par type de média",
|
||||
dateHistogram: "Distribution des dates de modification",
|
||||
sizeHistogram: "Distribution des tailles de fichier",
|
||||
}
|
||||
}
|
||||
},
|
||||
indexPicker: {
|
||||
selectNone: "Sélectionner aucun",
|
||||
selectAll: "Sélectionner tout",
|
||||
selectedIndex: "indice sélectionné",
|
||||
selectedIndices: "indices sélectionnés",
|
||||
},
|
||||
},
|
||||
"zh-CN": {
|
||||
filePage: {
|
||||
notFound: "未找到"
|
||||
},
|
||||
searchBar: {
|
||||
simple: "搜索",
|
||||
advanced: "高级搜索",
|
||||
fuzzy: "模糊搜索"
|
||||
},
|
||||
addTag: "添加",
|
||||
deleteTag: "删除",
|
||||
download: "下载",
|
||||
and: "与",
|
||||
page: "页",
|
||||
pages: "页",
|
||||
mimeTypes: "文件类型",
|
||||
tags: "标签",
|
||||
help: {
|
||||
simpleSearch: "简易搜索",
|
||||
advancedSearch: "高级搜索",
|
||||
help: "帮助",
|
||||
term: "<关键词>",
|
||||
and: "与操作",
|
||||
or: "或操作",
|
||||
not: "反选单个关键词",
|
||||
quotes: "括起来的部分视为一个关键词,保序",
|
||||
prefix: "在词尾使用时,匹配前缀",
|
||||
parens: "表达式编组",
|
||||
tildeTerm: "匹配编辑距离以内的关键词",
|
||||
tildePhrase: "匹配短语,容忍一些非匹配词",
|
||||
example1:
|
||||
"例如: <code>\"番茄\" +(炒蛋 | 牛腩) -饭</code> 将匹配" +
|
||||
"短语 <i>番茄炒蛋</i>、<i>炒蛋</i> 或者 <i>牛腩</i>,而忽略任何带有" +
|
||||
"<i>饭</i>的关键词.",
|
||||
defaultOperator:
|
||||
"表达式中无<code>+</code>或者<code>|</code>时,默认使用" +
|
||||
"<code>+</code>(与操作)。",
|
||||
fuzzy:
|
||||
"选中<b>模糊搜索</b>选项时,返回部分匹配的结果(3-grams)。",
|
||||
moreInfoSimple: "详细信息:<a target=\"_blank\" " +
|
||||
"rel=\"noreferrer\" href=\"//www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html\">Elasticsearch文档</a>",
|
||||
moreInfoAdvanced: "高级搜索模式文档:<a target=\"_blank\" rel=\"noreferrer\" href=\"//www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#query-string-syntax\">Elasticsearch文档</a>"
|
||||
},
|
||||
config: "配置",
|
||||
configDescription: "配置在此浏览器中实时保存。",
|
||||
configReset: "重置所有设置",
|
||||
searchOptions: "搜索选项",
|
||||
treemapOptions: "树状图选项",
|
||||
displayOptions: "显示选项",
|
||||
opt: {
|
||||
lang: "语言",
|
||||
highlight: "启用高亮",
|
||||
fuzzy: "默认使用模糊搜索",
|
||||
searchInPath: "匹配文档路径",
|
||||
suggestPath: "搜索框启用自动补全",
|
||||
fragmentSize: "高亮上下文大小",
|
||||
queryMode: "搜索模式",
|
||||
displayMode: "显示",
|
||||
columns: "列数",
|
||||
treemapType: "树状图类属性",
|
||||
treemapTiling: "树状图平铺",
|
||||
treemapColorGroupingDepth: "树状图颜色编组深度(展开)",
|
||||
treemapColor: "树状图颜色(折叠)",
|
||||
treemapSize: "树状图大小",
|
||||
theme: "主题",
|
||||
lightboxLoadOnlyCurrent: "在图片查看器中,不要预读相邻的全图",
|
||||
slideDuration: "幻灯片时长",
|
||||
resultSize: "每页结果数",
|
||||
tagOrOperator: "使用或操作(OR)匹配多个标签。",
|
||||
hideDuplicates: "使用校验码隐藏重复结果",
|
||||
hideLegacy: "隐藏'legacyES' Elasticsearch 通知",
|
||||
updateMimeMap: "媒体类型树的实时更新",
|
||||
useDatePicker: "使用日期选择器组件而不是滑块",
|
||||
vidPreviewInterval: "视频预览帧的持续时间,以毫秒为单位"
|
||||
},
|
||||
queryMode: {
|
||||
simple: "简单",
|
||||
advanced: "高级",
|
||||
},
|
||||
lang: {
|
||||
en: "English",
|
||||
fr: "Français",
|
||||
"zh-CN": "简体中文",
|
||||
},
|
||||
displayMode: {
|
||||
grid: "网格",
|
||||
list: "列表",
|
||||
},
|
||||
columns: {
|
||||
auto: "自动"
|
||||
},
|
||||
treemapType: {
|
||||
cascaded: "折叠",
|
||||
flat: "平铺(紧凑)"
|
||||
},
|
||||
treemapSize: {
|
||||
small: "小",
|
||||
medium: "中",
|
||||
large: "大",
|
||||
xLarge: "加大",
|
||||
xxLarge: "加加大",
|
||||
custom: "自订",
|
||||
},
|
||||
treemapTiling: {
|
||||
binary: "Binary",
|
||||
squarify: "Squarify",
|
||||
slice: "Slice",
|
||||
dice: "Dice",
|
||||
sliceDice: "Slice & Dice",
|
||||
},
|
||||
theme: {
|
||||
light: "亮",
|
||||
black: "暗"
|
||||
},
|
||||
hit: "命中",
|
||||
hits: "命中",
|
||||
details: "详细信息",
|
||||
stats: "统计信息",
|
||||
queryTime: "查询时间",
|
||||
totalSize: "总大小",
|
||||
pathBar: {
|
||||
placeholder: "过滤路径",
|
||||
modalTitle: "选择路径"
|
||||
},
|
||||
debug: "调试信息",
|
||||
debugDescription: "对调试除错有用的信息。 若您遇到bug或者想建议新功能,请提交新Issue到" +
|
||||
"<a href='https://github.com/simon987/sist2/issues/new/choose'>这里</a>.",
|
||||
tagline: "标签栏",
|
||||
toast: {
|
||||
esConnErrTitle: "Elasticsearch连接错误",
|
||||
esConnErr: "sist2 web 模块连接Elasticsearch出错。" +
|
||||
"查看服务日志以获取更多信息。",
|
||||
esQueryErrTitle: "查询错误",
|
||||
esQueryErr: "无法识别或执行查询,请查阅高级搜索文档。" +
|
||||
"查看服务日志以获取更多信息。",
|
||||
dupeTagTitle: "重复标签",
|
||||
dupeTag: "该标签已存在于此文档。",
|
||||
copiedToClipboard: "复制到剪贴板"
|
||||
},
|
||||
saveTagModalTitle: "增加标签",
|
||||
saveTagPlaceholder: "标签名",
|
||||
confirm: "确认",
|
||||
indexPickerPlaceholder: "选择一个索引",
|
||||
sort: {
|
||||
relevance: "相关度",
|
||||
dateAsc: "日期(由旧到新)",
|
||||
dateDesc: "日期(由新到旧)",
|
||||
sizeAsc: "大小(从小到大)",
|
||||
sizeDesc: "大小(从大到小)",
|
||||
nameAsc: "名字(A-z)",
|
||||
nameDesc: "名字 (Z-a)",
|
||||
random: "随机",
|
||||
},
|
||||
d3: {
|
||||
mimeCount: "各类文件数量分布",
|
||||
mimeSize: "各类文件大小分布",
|
||||
dateHistogram: "文件修改时间分布",
|
||||
sizeHistogram: "文件大小分布",
|
||||
},
|
||||
indexPicker: {
|
||||
selectNone: "清空",
|
||||
selectAll: "全选",
|
||||
selectedIndex: "选中索引",
|
||||
selectedIndices: "选中索引",
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import VueRouter, {RouteConfig} from "vue-router"
import StatsPage from "../views/StatsPage.vue"
import Configuration from "../views/Configuration.vue"
import SearchPage from "@/views/SearchPage.vue";
import FilePage from "@/views/FilePage.vue";

Vue.use(VueRouter)

@@ -21,6 +22,11 @@ const routes: Array<RouteConfig> = [
    path: "/config",
    name: "Configuration",
    component: Configuration
  },
  {
    path: "/file",
    name: "File",
    component: FilePage
  }
]

@@ -27,6 +27,7 @@ export default new Vuex.Store({
|
||||
size: 60,
|
||||
|
||||
optLang: "en",
|
||||
optLangIsDefault: true,
|
||||
optHideDuplicates: true,
|
||||
optTheme: "light",
|
||||
optDisplay: "grid",
|
||||
@@ -46,6 +47,10 @@ export default new Vuex.Store({
|
||||
optTreemapColor: "PuBuGn",
|
||||
optLightboxLoadOnlyCurrent: false,
|
||||
optLightboxSlideDuration: 15,
|
||||
optHideLegacy: false,
|
||||
optUpdateMimeMap: false,
|
||||
optUseDatePicker: false,
|
||||
optVidPreviewInterval: 700,
|
||||
|
||||
_onLoadSelectedIndices: [] as string[],
|
||||
_onLoadSelectedMimeTypes: [] as string[],
|
||||
@@ -70,9 +75,14 @@ export default new Vuex.Store({
|
||||
uiLightboxSlide: 0,
|
||||
uiReachedScrollEnd: false,
|
||||
|
||||
uiDetailsMimeAgg: null,
|
||||
uiShowDetails: false,
|
||||
|
||||
uiMimeMap: [] as any[]
|
||||
},
|
||||
mutations: {
|
||||
setUiShowDetails: (state, val) => state.uiShowDetails = val,
|
||||
setUiDetailsMimeAgg: (state, val) => state.uiDetailsMimeAgg = val,
|
||||
setUiReachedScrollEnd: (state, val) => state.uiReachedScrollEnd = val,
|
||||
setTags: (state, val) => state.tags = val,
|
||||
setPathText: (state, val) => state.pathText = val,
|
||||
@@ -81,7 +91,10 @@ export default new Vuex.Store({
|
||||
setSist2Info: (state, val) => state.sist2Info = val,
|
||||
setSeed: (state, val) => state.seed = val,
|
||||
setOptHideDuplicates: (state, val) => state.optHideDuplicates = val,
|
||||
setOptLang: (state, val) => state.optLang = val,
|
||||
setOptLang: (state, val) => {
|
||||
state.optLang = val;
|
||||
state.optLangIsDefault = false;
|
||||
},
|
||||
setSortMode: (state, val) => state.sortMode = val,
|
||||
setIndices: (state, val) => {
|
||||
state.indices = val;
|
||||
@@ -144,8 +157,13 @@ export default new Vuex.Store({
|
||||
setOptTreemapColorGroupingDepth: (state, val) => state.optTreemapColorGroupingDepth = val,
|
||||
setOptTreemapSize: (state, val) => state.optTreemapSize = val,
|
||||
setOptTreemapColor: (state, val) => state.optTreemapColor = val,
|
||||
setOptHideLegacy: (state, val) => state.optHideLegacy = val,
|
||||
setOptUpdateMimeMap: (state, val) => state.optUpdateMimeMap = val,
|
||||
setOptUseDatePicker: (state, val) => state.optUseDatePicker = val,
|
||||
setOptVidPreviewInterval: (state, val) => state.optVidPreviewInterval = val,
|
||||
|
||||
setOptLightboxLoadOnlyCurrent: (state, val) => state.optLightboxLoadOnlyCurrent = val,
|
||||
setOptLightboxSlideDuration: (state, val) => state.optLightboxSlideDuration = val,
|
||||
|
||||
setUiMimeMap: (state, val) => state.uiMimeMap = val,
|
||||
|
||||
@@ -155,8 +173,24 @@ export default new Vuex.Store({
|
||||
busUpdateTags: () => {
|
||||
// noop
|
||||
},
|
||||
busSearch: () => {
|
||||
// noop
|
||||
},
|
||||
busTouchEnd: () => {
|
||||
// noop
|
||||
},
|
||||
busTnTouchStart: (doc_id) => {
|
||||
// noop
|
||||
},
|
||||
},
|
||||
actions: {
|
||||
setSist2Info: (store, val) => {
|
||||
store.commit("setSist2Info", val);
|
||||
|
||||
if (store.state.optLangIsDefault) {
|
||||
store.commit("setOptLang", val.lang);
|
||||
}
|
||||
},
|
||||
loadFromArgs({commit}, route: Route) {
|
||||
|
||||
if (route.query.q) {
|
||||
@@ -276,6 +310,7 @@ export default new Vuex.Store({
|
||||
commit("setUiLightboxTypes", []);
|
||||
commit("setUiLightboxCaptions", []);
|
||||
commit("setUiLightboxKey", 0);
|
||||
commit("setUiDetailsMimeAgg", null);
|
||||
}
|
||||
},
|
||||
modules: {},
|
||||
@@ -339,5 +374,9 @@ export default new Vuex.Store({
|
||||
optLightboxLoadOnlyCurrent: state => state.optLightboxLoadOnlyCurrent,
|
||||
optLightboxSlideDuration: state => state.optLightboxSlideDuration,
|
||||
optResultSize: state => state.size,
|
||||
optHideLegacy: state => state.optHideLegacy,
|
||||
optUpdateMimeMap: state => state.optUpdateMimeMap,
|
||||
optUseDatePicker: state => state.optUseDatePicker,
|
||||
optVidPreviewInterval: state => state.optVidPreviewInterval,
|
||||
}
|
||||
})
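The store changes above add an optLangIsDefault flag so that the language reported by the server is only applied as long as the user has not explicitly picked one; choosing a language through setOptLang clears the flag. A reduced sketch of that precedence rule, with LangPrefs as an illustrative stand-in for the relevant slice of state:

// Reduced sketch of the language-preference precedence added to the store above.
// LangPrefs is an illustrative shape, not the actual Vuex state.
interface LangPrefs {
    lang: string;
    isDefault: boolean;
}

function setUserLang(prefs: LangPrefs, lang: string): void {
    prefs.lang = lang;
    prefs.isDefault = false; // an explicit choice (or the first applied value) wins from now on
}

function applyServerInfo(prefs: LangPrefs, serverLang: string): void {
    if (prefs.isDefault) {
        // The store routes this through the same mutation, so it only happens once.
        setUserLang(prefs, serverLang);
    }
}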
@@ -1,8 +1,12 @@
import {EsHit} from "@/Sist2Api";

export function ext(hit: EsHit) {
    return Object.prototype.hasOwnProperty.call(hit._source, "extension")
    && hit["_source"]["extension"] !== "" ? "." + hit["_source"]["extension"] : "";
    return srcExt(hit._source)
}

export function srcExt(src) {
    return Object.prototype.hasOwnProperty.call(src, "extension")
    && src["extension"] !== "" ? "." + src["extension"] : "";
}

export function strUnescape(str: string): string {
@@ -97,6 +101,30 @@ export function getSelectedTreeNodes(tree: any) {
    return Array.from(selectedNodes);
}

export function getTreeNodeAttributes(tree: any) {
    const nodes = tree.selectable();
    const attributes = {};

    for (let i = 0; i < nodes.length; i++) {

        let id = null;

        if (nodes[i].text.indexOf("(") !== -1 && nodes[i].values) {
            id = nodes[i].values.slice(-1)[0];
        } else {
            id = nodes[i].id
        }

        attributes[id] = {
            checked: nodes[i].itree.state.checked,
            collapsed: nodes[i].itree.state.collapsed,
        }
    }

    return attributes;
}


export function serializeMimes(mimes: string[]): string | undefined {
    if (mimes.length == 0) {
        return undefined;

@@ -15,11 +15,8 @@
|
||||
<h4>{{ $t("displayOptions") }}</h4>
|
||||
|
||||
<b-card>
|
||||
<b-form-checkbox :checked="optLightboxLoadOnlyCurrent" @input="setOptLightboxLoadOnlyCurrent">
|
||||
{{ $t("opt.lightboxLoadOnlyCurrent") }}
|
||||
</b-form-checkbox>
|
||||
|
||||
<label>{{ $t("opt.lang") }}</label>
|
||||
<label><LanguageIcon/><span style="vertical-align: middle"> {{ $t("opt.lang") }}</span></label>
|
||||
<b-form-select :options="langOptions" :value="optLang" @input="setOptLang"></b-form-select>
|
||||
|
||||
<label>{{ $t("opt.theme") }}</label>
|
||||
@@ -30,6 +27,24 @@
|
||||
|
||||
<label>{{ $t("opt.columns") }}</label>
|
||||
<b-form-select :options="columnsOptions" :value="optColumns" @input="setOptColumns"></b-form-select>
|
||||
|
||||
<div style="height: 10px"></div>
|
||||
|
||||
<b-form-checkbox :checked="optLightboxLoadOnlyCurrent" @input="setOptLightboxLoadOnlyCurrent">
|
||||
{{ $t("opt.lightboxLoadOnlyCurrent") }}
|
||||
</b-form-checkbox>
|
||||
|
||||
<b-form-checkbox :checked="optHideLegacy" @input="setOptHideLegacy">
|
||||
{{ $t("opt.hideLegacy") }}
|
||||
</b-form-checkbox>
|
||||
|
||||
<b-form-checkbox :checked="optUpdateMimeMap" @input="setOptUpdateMimeMap">
|
||||
{{ $t("opt.updateMimeMap") }}
|
||||
</b-form-checkbox>
|
||||
|
||||
<b-form-checkbox :checked="optUseDatePicker" @input="setOptUseDatePicker">
|
||||
{{ $t("opt.useDatePicker") }}
|
||||
</b-form-checkbox>
|
||||
</b-card>
|
||||
|
||||
<br/>
|
||||
@@ -70,6 +85,10 @@
|
||||
<label>{{ $t("opt.slideDuration") }}</label>
|
||||
<b-form-input :value="optLightboxSlideDuration" type="number" min="1"
|
||||
@input="setOptLightboxSlideDuration"></b-form-input>
|
||||
|
||||
<label>{{ $t("opt.vidPreviewInterval") }}</label>
|
||||
<b-form-input :value="optVidPreviewInterval" type="number" min="50"
|
||||
@input="setOptVidPreviewInterval"></b-form-input>
|
||||
</b-card>
|
||||
|
||||
<h4 class="mt-3">{{ $t("treemapOptions") }}</h4>
|
||||
@@ -113,15 +132,15 @@
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Vue from "vue";
|
||||
import {mapGetters, mapMutations} from "vuex";
|
||||
import {mapActions, mapGetters, mapMutations} from "vuex";
|
||||
import DebugInfo from "@/components/DebugInfo.vue";
|
||||
import Preloader from "@/components/Preloader.vue";
|
||||
import sist2 from "@/Sist2Api";
|
||||
import GearIcon from "@/components/GearIcon.vue";
|
||||
import GearIcon from "@/components/icons/GearIcon.vue";
|
||||
import LanguageIcon from "@/components/icons/LanguageIcon";
|
||||
|
||||
export default {
|
||||
components: {GearIcon, DebugInfo, Preloader},
|
||||
components: {LanguageIcon, GearIcon, DebugInfo, Preloader},
|
||||
data() {
|
||||
return {
|
||||
loading: true,
|
||||
@@ -129,6 +148,7 @@ export default {
|
||||
langOptions: [
|
||||
{value: "en", text: this.$t("lang.en")},
|
||||
{value: "fr", text: this.$t("lang.fr")},
|
||||
{value: "zh-CN", text: this.$t("lang.zh-CN")},
|
||||
],
|
||||
queryModeOptions: [
|
||||
{value: "simple", text: this.$t("queryMode.simple")},
|
||||
@@ -215,6 +235,10 @@ export default {
|
||||
"optTagOrOperator",
|
||||
"optLang",
|
||||
"optHideDuplicates",
|
||||
"optHideLegacy",
|
||||
"optUpdateMimeMap",
|
||||
"optUseDatePicker",
|
||||
"optVidPreviewInterval",
|
||||
]),
|
||||
clientWidth() {
|
||||
return window.innerWidth;
|
||||
@@ -222,7 +246,7 @@ export default {
|
||||
},
|
||||
mounted() {
|
||||
sist2.getSist2Info().then(data => {
|
||||
this.$store.commit("setSist2Info", data)
|
||||
this.setSist2Info(data);
|
||||
this.loading = false;
|
||||
});
|
||||
|
||||
@@ -233,6 +257,9 @@ export default {
|
||||
});
|
||||
},
|
||||
methods: {
|
||||
...mapActions({
|
||||
setSist2Info: "setSist2Info",
|
||||
}),
|
||||
...mapMutations([
|
||||
"setOptTheme",
|
||||
"setOptDisplay",
|
||||
@@ -250,11 +277,14 @@ export default {
|
||||
"setOptTreemapSize",
|
||||
"setOptLightboxLoadOnlyCurrent",
|
||||
"setOptLightboxSlideDuration",
|
||||
"setOptContainerWidth",
|
||||
"setOptResultSize",
|
||||
"setOptTagOrOperator",
|
||||
"setOptLang",
|
||||
"setOptHideDuplicates"
|
||||
"setOptHideDuplicates",
|
||||
"setOptHideLegacy",
|
||||
"setOptUpdateMimeMap",
|
||||
"setOptUseDatePicker",
|
||||
"setOptVidPreviewInterval",
|
||||
]),
|
||||
onResetClick() {
|
||||
localStorage.removeItem("sist2_configuration");
|
||||
|
||||
131  sist2-vue/src/views/FilePage.vue  Normal file
@@ -0,0 +1,131 @@
|
||||
<template>
|
||||
<div style="margin-left: auto; margin-right: auto;" class="container">
|
||||
<Preloader v-if="loading"></Preloader>
|
||||
<b-card v-else-if="!loading && found">
|
||||
<b-card-title :title="doc._source.name + ext(doc)">
|
||||
{{ doc._source.name + ext(doc) }}
|
||||
</b-card-title>
|
||||
|
||||
<!-- Thumbnail-->
|
||||
<div style="position: relative; margin-left: auto; margin-right: auto; text-align: center">
|
||||
<FullThumbnail :doc="doc" :small-badge="false" @onThumbnailClick="onThumbnailClick()"></FullThumbnail>
|
||||
</div>
|
||||
|
||||
<!-- Audio player-->
|
||||
<audio v-if="doc._props.isAudio" ref="audio" preload="none" class="audio-fit fit" controls
|
||||
:type="doc._source.mime"
|
||||
:src="`f/${doc._id}`"></audio>
|
||||
|
||||
<InfoTable :doc="doc" v-if="doc"></InfoTable>
|
||||
|
||||
<div v-if="doc._source.content" class="content-div">{{ doc._source.content }}</div>
|
||||
</b-card>
|
||||
<div v-else>
|
||||
<b-card>
|
||||
<b-card-title>{{ $t("filePage.notFound") }}</b-card-title>
|
||||
</b-card>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Preloader from "@/components/Preloader.vue";
|
||||
import InfoTable from "@/components/InfoTable.vue";
|
||||
import Sist2Api from "@/Sist2Api";
|
||||
import {ext} from "@/util";
|
||||
import Vue from "vue";
|
||||
import sist2 from "@/Sist2Api";
|
||||
import FullThumbnail from "@/components/FullThumbnail";
|
||||
|
||||
export default Vue.extend({
|
||||
name: "FilePage",
|
||||
components: {
|
||||
FullThumbnail,
|
||||
Preloader,
|
||||
InfoTable
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
loading: true,
|
||||
found: false,
|
||||
doc: null
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
ext: ext,
|
||||
onThumbnailClick() {
|
||||
window.open(`/f/${this.doc._id}`, "_blank");
|
||||
},
|
||||
findById(id) {
|
||||
return {
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
"_id": id
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
size: 1
|
||||
}
|
||||
},
|
||||
findByName(name) {
|
||||
return {
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
"name": name
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
size: 1
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
mounted() {
|
||||
if (this.$store.state.sist2Info === null) {
|
||||
sist2.getSist2Info().then(data => {
|
||||
this.$store.dispatch("setSist2Info", data);
|
||||
this.$store.commit("setIndices", data.indices);
|
||||
});
|
||||
}
|
||||
|
||||
let query = null;
|
||||
if (this.$route.query.byId) {
|
||||
query = this.findById(this.$route.query.byId);
|
||||
} else if (this.$route.query.byName) {
|
||||
query = this.findByName(this.$route.query.byName);
|
||||
}
|
||||
|
||||
if (query) {
|
||||
Sist2Api.esQuery(query).then(result => {
|
||||
if (result.hits.hits.length === 0) {
|
||||
this.found = false;
|
||||
} else {
|
||||
this.doc = result.hits.hits[0];
|
||||
this.found = true;
|
||||
}
|
||||
|
||||
this.loading = false;
|
||||
});
|
||||
} else {
|
||||
this.loading = false;
|
||||
this.found = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.img-wrapper {
|
||||
display: inline-block;
|
||||
}
|
||||
</style>
|
||||
@@ -19,11 +19,7 @@
|
||||
</b-row>
|
||||
<b-row>
|
||||
<b-col sm="6">
|
||||
<b-row>
|
||||
<b-col style="height: 70px;">
|
||||
<DateSlider></DateSlider>
|
||||
</b-col>
|
||||
</b-row>
|
||||
<b-row>
|
||||
<b-col>
|
||||
<IndexPicker></IndexPicker>
|
||||
@@ -31,7 +27,7 @@
|
||||
</b-row>
|
||||
</b-col>
|
||||
<b-col>
|
||||
<b-tabs>
|
||||
<b-tabs justified>
|
||||
<b-tab :title="$t('mimeTypes')">
|
||||
<MimePicker></MimePicker>
|
||||
</b-tab>
|
||||
@@ -43,9 +39,13 @@
|
||||
</b-row>
|
||||
</b-card>
|
||||
|
||||
<Preloader v-if="searchBusy && docs.length === 0" class="mt-3"></Preloader>
|
||||
<div v-show="docs.length === 0 && !uiLoading">
|
||||
<Preloader v-if="searchBusy" class="mt-3"></Preloader>
|
||||
|
||||
<div v-else-if="docs.length > 0">
|
||||
<ResultsCard></ResultsCard>
|
||||
</div>
|
||||
|
||||
<div v-if="docs.length > 0">
|
||||
<ResultsCard></ResultsCard>
|
||||
|
||||
<DocCardWall v-if="optDisplay==='grid'" :docs="docs" :append="appendFunc"></DocCardWall>
|
||||
@@ -56,7 +56,7 @@
|
||||
|
||||
<script lang="ts">
|
||||
import Preloader from "@/components/Preloader.vue";
|
||||
import {mapGetters, mapMutations} from "vuex";
|
||||
import {mapActions, mapGetters, mapMutations} from "vuex";
|
||||
import sist2 from "../Sist2Api";
|
||||
import Sist2Api, {EsHit, EsResult} from "../Sist2Api";
|
||||
import SearchBar from "@/components/SearchBar.vue";
|
||||
@@ -100,6 +100,10 @@ export default Vue.extend({
|
||||
...mapGetters(["indices", "optDisplay"]),
|
||||
},
|
||||
mounted() {
|
||||
// Handle touch events
|
||||
window.ontouchend = () => this.$store.commit("busTouchEnd");
|
||||
window.ontouchcancel = this.$store.commit("busTouchEnd");
|
||||
|
||||
this.search = _debounce(async (clear: boolean) => {
|
||||
if (clear) {
|
||||
await this.clearResults();
|
||||
@@ -109,10 +113,6 @@ export default Vue.extend({
|
||||
|
||||
}, 350, {leading: false});
|
||||
|
||||
Sist2Api.getMimeTypes().then(mimeMap => {
|
||||
this.$store.commit("setUiMimeMap", mimeMap);
|
||||
});
|
||||
|
||||
this.$store.dispatch("loadFromArgs", this.$route).then(() => {
|
||||
this.$store.subscribe(() => this.$store.dispatch("updateArgs", this.$router));
|
||||
this.$store.subscribe((mutation) => {
|
||||
@@ -138,17 +138,23 @@ export default Vue.extend({
|
||||
sist2.getSist2Info().then(data => {
|
||||
this.setSist2Info(data);
|
||||
this.setIndices(data.indices);
|
||||
this.uiLoading = false;
|
||||
|
||||
Sist2Api.getMimeTypes(Sist2Query.searchQuery()).then(({mimeMap}) => {
|
||||
this.$store.commit("setUiMimeMap", mimeMap);
|
||||
this.uiLoading = false;
|
||||
this.search(true);
|
||||
});
|
||||
|
||||
}).catch(() => {
|
||||
this.showErrorToast();
|
||||
});
|
||||
});
|
||||
},
|
||||
methods: {
|
||||
...mapMutations({
|
||||
...mapActions({
|
||||
setSist2Info: "setSist2Info",
|
||||
}),
|
||||
...mapMutations({
|
||||
setIndices: "setIndices",
|
||||
setDateBoundsMin: "setDateBoundsMin",
|
||||
setDateBoundsMax: "setDateBoundsMax",
|
||||
@@ -179,6 +185,7 @@ export default Vue.extend({
|
||||
async searchNow(q: any) {
|
||||
this.searchBusy = true;
|
||||
await this.$store.dispatch("incrementQuerySequence");
|
||||
this.$store.commit("busSearch");
|
||||
|
||||
Sist2Api.esQuery(q).then(async (resp: EsResult) => {
|
||||
await this.handleSearch(resp);
|
||||
@@ -280,6 +287,11 @@ export default Vue.extend({
|
||||
border: none;
|
||||
}
|
||||
|
||||
.toast-header-info, .toast-body-info {
|
||||
background: #2196f3;
|
||||
color: #fff !important;
|
||||
}
|
||||
|
||||
.toast-header-error, .toast-body-error {
|
||||
background: #a94442;
|
||||
color: #f2dede !important;
|
||||
|
||||
180  src/cli.c
@@ -5,7 +5,8 @@
|
||||
#define DEFAULT_OUTPUT "index.sist2/"
|
||||
#define DEFAULT_CONTENT_SIZE 32768
|
||||
#define DEFAULT_QUALITY 1
|
||||
#define DEFAULT_SIZE 300
|
||||
#define DEFAULT_THUMBNAIL_SIZE 500
|
||||
#define DEFAULT_THUMBNAIL_COUNT 1
|
||||
#define DEFAULT_REWRITE_URL ""
|
||||
|
||||
#define DEFAULT_ES_URL "http://localhost:9200"
|
||||
@@ -19,9 +20,12 @@
|
||||
|
||||
#define DEFAULT_MAX_MEM_BUFFER 2000
|
||||
|
||||
#define DEFAULT_THROTTLE_MEMORY_THRESHOLD 0
|
||||
|
||||
const char *TESS_DATAPATHS[] = {
|
||||
"/usr/share/tessdata/",
|
||||
"/usr/share/tesseract-ocr/tessdata/",
|
||||
"/usr/share/tesseract-ocr/4.00/tessdata/",
|
||||
"./",
|
||||
NULL
|
||||
};
|
||||
@@ -64,6 +68,10 @@ void index_args_destroy(index_args_t *args) {
|
||||
if (args->es_settings_path) {
|
||||
free(args->es_settings);
|
||||
}
|
||||
|
||||
if (args->index_path != NULL) {
|
||||
free(args->index_path);
|
||||
}
|
||||
free(args);
|
||||
}
|
||||
|
||||
@@ -84,13 +92,12 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
|
||||
|
||||
char *abs_path = abspath(argv[1]);
|
||||
if (abs_path == NULL) {
|
||||
fprintf(stderr, "File not found: %s\n", argv[1]);
|
||||
return 1;
|
||||
LOG_FATALF("cli.c", "Invalid PATH argument. File not found: %s", argv[1])
|
||||
} else {
|
||||
args->path = abs_path;
|
||||
}
|
||||
|
||||
if (args->incremental != NULL) {
|
||||
if (args->incremental != OPTION_VALUE_UNSPECIFIED) {
|
||||
args->incremental = abspath(args->incremental);
|
||||
if (abs_path == NULL) {
|
||||
sist_log("main.c", LOG_SIST_WARNING, "Could not open original index! Disabled incremental scan feature.");
|
||||
@@ -98,32 +105,39 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
|
||||
}
|
||||
}
|
||||
|
||||
if (args->quality == 0) {
|
||||
args->quality = DEFAULT_QUALITY;
|
||||
} else if (args->quality < 1 || args->quality > 31) {
|
||||
fprintf(stderr, "Invalid quality: %f\n", args->quality);
|
||||
if (args->tn_quality == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->tn_quality = DEFAULT_QUALITY;
|
||||
} else if (args->tn_quality < 1.0f || args->tn_quality > 31.0f) {
|
||||
fprintf(stderr, "Invalid value for --thumbnail-quality argument: %f. Must be within [1.0, 31.0].\n",
|
||||
args->tn_quality);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (args->size == 0) {
|
||||
args->size = DEFAULT_SIZE;
|
||||
} else if (args->size > 0 && args->size < 32) {
|
||||
printf("Invalid size: %d\n", args->content_size);
|
||||
if (args->tn_size == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->tn_size = DEFAULT_THUMBNAIL_SIZE;
|
||||
} else if (args->tn_size < 32) {
|
||||
printf("Invalid value --thumbnail-size argument: %d. Must be greater than 32 pixels.\n", args->tn_size);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (args->content_size == 0) {
|
||||
if (args->tn_count == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->tn_count = DEFAULT_THUMBNAIL_COUNT;
|
||||
} else if (args->tn_count == OPTION_VALUE_DISABLE) {
|
||||
args->tn_count = 0;
|
||||
}
|
||||
|
||||
if (args->content_size == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->content_size = DEFAULT_CONTENT_SIZE;
|
||||
}
|
||||
|
||||
if (args->threads == 0) {
|
||||
args->threads = 1;
|
||||
} else if (args->threads < 0) {
|
||||
fprintf(stderr, "Invalid threads: %d\n", args->threads);
|
||||
fprintf(stderr, "Invalid value for --threads: %d. Must be a positive number\n", args->threads);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (args->output == NULL) {
|
||||
if (args->output == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->output = malloc(strlen(DEFAULT_OUTPUT) + 1);
|
||||
strcpy(args->output, DEFAULT_OUTPUT);
|
||||
} else {
|
||||
@@ -142,7 +156,7 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
|
||||
args->depth += 1;
|
||||
}
|
||||
|
||||
if (args->name == NULL) {
|
||||
if (args->name == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->name = g_path_get_basename(args->output);
|
||||
} else {
|
||||
char *tmp = malloc(strlen(args->name) + 1);
|
||||
@@ -150,11 +164,11 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
|
||||
args->name = tmp;
|
||||
}
|
||||
|
||||
if (args->rewrite_url == NULL) {
|
||||
if (args->rewrite_url == OPTION_VALUE_UNSPECIFIED) {
|
||||
args->rewrite_url = DEFAULT_REWRITE_URL;
|
||||
}
|
||||
|
||||
if (args->archive == NULL || strcmp(args->archive, "recurse") == 0) {
|
||||
if (args->archive == OPTION_VALUE_UNSPECIFIED || strcmp(args->archive, "recurse") == 0) {
|
||||
args->archive_mode = ARC_MODE_RECURSE;
|
||||
} else if (strcmp(args->archive, "list") == 0) {
|
||||
args->archive_mode = ARC_MODE_LIST;
|
||||
@@ -167,17 +181,50 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (args->tesseract_lang != NULL) {
|
||||
TessBaseAPI *api = TessBaseAPICreate();
|
||||
|
||||
char filename[128];
|
||||
sprintf(filename, "%s.traineddata", args->tesseract_lang);
|
||||
const char *path = find_file_in_paths(TESS_DATAPATHS, filename);
|
||||
if (path == NULL) {
|
||||
LOG_FATAL("cli.c", "Could not find tesseract language file!");
|
||||
if (args->ocr_images && args->tesseract_lang == OPTION_VALUE_UNSPECIFIED) {
|
||||
fprintf(stderr, "You must specify --ocr-lang <LANG> to use --ocr-images");
|
||||
return 1;
|
||||
}
|
||||
|
||||
ret = TessBaseAPIInit3(api, path, args->tesseract_lang);
|
||||
if (args->ocr_ebooks && args->tesseract_lang == OPTION_VALUE_UNSPECIFIED) {
|
||||
fprintf(stderr, "You must specify --ocr-lang <LANG> to use --ocr-ebooks");
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (args->tesseract_lang != OPTION_VALUE_UNSPECIFIED) {
|
||||
|
||||
if (!args->ocr_ebooks && !args->ocr_images) {
|
||||
fprintf(stderr, "You must specify at least one of --ocr-ebooks, --ocr-images");
|
||||
return 1;
|
||||
}
|
||||
|
||||
TessBaseAPI *api = TessBaseAPICreate();
|
||||
|
||||
const char *trained_data_path = NULL;
|
||||
char *lang = malloc(strlen(args->tesseract_lang) + 1);
|
||||
strcpy(lang, args->tesseract_lang);
|
||||
|
||||
lang = strtok(lang, "+");
|
||||
|
||||
while (lang != NULL) {
|
||||
char filename[128];
|
||||
sprintf(filename, "%s.traineddata", lang);
|
||||
|
||||
const char *path = find_file_in_paths(TESS_DATAPATHS, filename);
|
||||
if (path == NULL) {
|
||||
LOG_FATALF("cli.c", "Could not find tesseract language file: %s!", filename);
|
||||
}
|
||||
if (trained_data_path != NULL && path != trained_data_path) {
|
||||
LOG_FATAL("cli.c", "When specifying more than one tesseract language, all the traineddata "
|
||||
"files must be in the same folder")
|
||||
}
|
||||
trained_data_path = path;
|
||||
|
||||
lang = strtok(NULL, "+");
|
||||
}
|
||||
free(lang);
|
||||
|
||||
ret = TessBaseAPIInit3(api, trained_data_path, args->tesseract_lang);
|
||||
if (ret != 0) {
|
||||
fprintf(stderr, "Could not initialize tesseract with lang '%s'\n", args->tesseract_lang);
|
||||
return 1;
|
||||
@@ -185,10 +232,10 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
|
||||
TessBaseAPIEnd(api);
|
||||
TessBaseAPIDelete(api);
|
||||
|
||||
args->tesseract_path = path;
|
||||
args->tesseract_path = trained_data_path;
|
||||
}

if (args->exclude_regex != NULL) {
if (args->exclude_regex != OPTION_VALUE_UNSPECIFIED) {
const char *error;
int error_offset;

@@ -208,18 +255,36 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
ScanCtx.exclude = NULL;
}

if (args->treemap_threshold_str == 0) {
if (args->treemap_threshold_str == OPTION_VALUE_UNSPECIFIED) {
args->treemap_threshold = DEFAULT_TREEMAP_THRESHOLD;
} else {
args->treemap_threshold = atof(args->treemap_threshold_str);
}

if (args->max_memory_buffer == 0) {
args->max_memory_buffer = DEFAULT_MAX_MEM_BUFFER;
if (args->max_memory_buffer_mib == OPTION_VALUE_UNSPECIFIED) {
args->max_memory_buffer_mib = DEFAULT_MAX_MEM_BUFFER;
}

LOG_DEBUGF("cli.c", "arg quality=%f", args->quality)
LOG_DEBUGF("cli.c", "arg size=%d", args->size)
if (args->scan_mem_limit_mib == OPTION_VALUE_UNSPECIFIED || args->scan_mem_limit_mib == OPTION_VALUE_DISABLE) {
args->scan_mem_limit_mib = DEFAULT_THROTTLE_MEMORY_THRESHOLD;
}

if (args->list_path != OPTION_VALUE_UNSPECIFIED) {
if (strcmp(args->list_path, "-") == 0) {
args->list_file = stdin;
LOG_DEBUG("cli.c", "Using stdin as list file")
} else {
args->list_file = fopen(args->list_path, "r");

if (args->list_file == NULL) {
LOG_FATALF("main.c", "List file could not be opened: %s (%s)", args->list_path, errno);
}
}
}

LOG_DEBUGF("cli.c", "arg tn_quality=%f", args->tn_quality)
LOG_DEBUGF("cli.c", "arg tn_size=%d", args->tn_size)
LOG_DEBUGF("cli.c", "arg tn_count=%d", args->tn_count)
LOG_DEBUGF("cli.c", "arg content_size=%d", args->content_size)
LOG_DEBUGF("cli.c", "arg threads=%d", args->threads)
LOG_DEBUGF("cli.c", "arg incremental=%s", args->incremental)
@@ -236,7 +301,8 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv) {
LOG_DEBUGF("cli.c", "arg fast=%d", args->fast)
LOG_DEBUGF("cli.c", "arg fast_epub=%d", args->fast_epub)
LOG_DEBUGF("cli.c", "arg treemap_threshold=%f", args->treemap_threshold)
LOG_DEBUGF("cli.c", "arg max_memory_buffer=%d", args->max_memory_buffer)
LOG_DEBUGF("cli.c", "arg max_memory_buffer_mib=%d", args->max_memory_buffer_mib)
LOG_DEBUGF("cli.c", "arg list_path=%s", args->list_path)

return 0;
}
@@ -287,11 +353,9 @@ int index_args_validate(index_args_t *args, int argc, const char **argv) {

char *index_path = abspath(argv[1]);
if (index_path == NULL) {
fprintf(stderr, "File not found: %s\n", argv[1]);
return 1;
LOG_FATALF("cli.c", "Invalid PATH argument. File not found: %s", argv[1])
} else {
args->index_path = argv[1];
free(index_path);
args->index_path = index_path;
}

if (args->es_url == NULL) {
@@ -328,8 +392,16 @@ int index_args_validate(index_args_t *args, int argc, const char **argv) {
LOG_DEBUGF("cli.c", "arg es_index=%s", args->es_index)
LOG_DEBUGF("cli.c", "arg index_path=%s", args->index_path)
LOG_DEBUGF("cli.c", "arg script_path=%s", args->script_path)
LOG_DEBUGF("cli.c", "arg async_script=%s", args->async_script)
LOG_DEBUGF("cli.c", "arg script=%s", args->script)
LOG_DEBUGF("cli.c", "arg async_script=%d", args->async_script)

if (args->script) {
char log_buf[5000];

strncpy(log_buf, args->script, sizeof(log_buf));
*(log_buf + sizeof(log_buf) - 1) = '\0';
LOG_DEBUGF("cli.c", "arg script=%s", log_buf)
}

LOG_DEBUGF("cli.c", "arg print=%d", args->print)
LOG_DEBUGF("cli.c", "arg es_mappings_path=%s", args->es_mappings_path)
LOG_DEBUGF("cli.c", "arg es_mappings=%s", args->es_mappings)
@@ -362,15 +434,15 @@ int web_args_validate(web_args_t *args, int argc, const char **argv) {
args->es_index = DEFAULT_ES_INDEX;
}

if (args->lang == NULL) {
args->lang = DEFAULT_LANG;
}

if (args->tagline == NULL) {
args->tagline = DEFAULT_TAGLINE;
}

if (strlen(args->lang) != 2) {
if (args->lang == NULL) {
args->lang = DEFAULT_LANG;
}

if (strlen(args->lang) != 2 && strlen(args->lang) != 5) {
fprintf(stderr, "Invalid --lang value, see usage\n");
return 1;
}
@@ -426,8 +498,7 @@ int web_args_validate(web_args_t *args, int argc, const char **argv) {
for (int i = 0; i < args->index_count; i++) {
char *abs_path = abspath(args->indices[i]);
if (abs_path == NULL) {
fprintf(stderr, "File not found: %s\n", args->indices[i]);
return 1;
LOG_FATALF("cli.c", "Index not found: %s", args->indices[i])
}
}

@@ -467,11 +538,9 @@ int exec_args_validate(exec_args_t *args, int argc, const char **argv) {

char *index_path = abspath(argv[1]);
if (index_path == NULL) {
fprintf(stderr, "File not found: %s\n", argv[1]);
return 1;
LOG_FATALF("cli.c", "Invalid index PATH argument. File not found: %s", argv[1])
} else {
args->index_path = argv[1];
free(index_path);
args->index_path = index_path;
}

if (args->es_url == NULL) {
@@ -491,6 +560,11 @@ int exec_args_validate(exec_args_t *args, int argc, const char **argv) {
}

LOG_DEBUGF("cli.c", "arg script_path=%s", args->script_path)
LOG_DEBUGF("cli.c", "arg script=%s", args->script)

char log_buf[5000];
strncpy(log_buf, args->script, sizeof(log_buf));
*(log_buf + sizeof(log_buf) - 1) = '\0';
LOG_DEBUGF("cli.c", "arg script=%s", log_buf)

return 0;
}
src/cli.h (19 changed lines)
@@ -5,11 +5,15 @@

#include "libscan/arc/arc.h"

#define OPTION_VALUE_DISABLE (-1)
#define OPTION_VALUE_UNSPECIFIED (0)

typedef struct scan_args {
float quality;
int size;
float tn_quality;
int tn_size;
int content_size;
int threads;
int scan_mem_limit_mib;
char *incremental;
char *output;
char *rewrite_url;
@@ -21,14 +25,20 @@ typedef struct scan_args {
char *archive_passphrase;
char *tesseract_lang;
const char *tesseract_path;
int ocr_images;
int ocr_ebooks;
char *exclude_regex;
int fast;
const char* treemap_threshold_str;
double treemap_threshold;
int max_memory_buffer;
int max_memory_buffer_mib;
int read_subtitles;
/** Number of thumbnails to generate */
int tn_count;
int fast_epub;
int calculate_checksums;
char *list_path;
FILE *list_file;
} scan_args_t;

scan_args_t *scan_args_create();
@@ -40,7 +50,7 @@ int scan_args_validate(scan_args_t *args, int argc, const char **argv);
typedef struct index_args {
char *es_url;
char *es_index;
const char *index_path;
char *index_path;
const char *script_path;
char *script;
const char *es_settings_path;
@@ -52,6 +62,7 @@ typedef struct index_args {
int async_script;
int force_reset;
int threads;
int incremental;
} index_args_t;

typedef struct web_args {
|
||||
|
||||
ScanCtx_t ScanCtx = {
|
||||
.stat_index_size = 0,
|
||||
.stat_tn_size = 0,
|
||||
.dbg_current_files = NULL,
|
||||
.pool = NULL
|
||||
};
|
||||
|
||||
src/ctx.h (12 changed lines)
@@ -17,6 +17,7 @@
#include "libscan/wpd/wpd.h"
#include "libscan/json/json.h"
#include "src/io/store.h"
#include "src/index/elastic.h"

#include <glib.h>
#include <pcre.h>
@@ -34,12 +35,15 @@ typedef struct {
int threads;
int depth;
int calculate_checksums;
size_t mem_limit;

size_t stat_tn_size;
size_t stat_index_size;

GHashTable *original_table;
GHashTable *copy_table;
GHashTable *new_table;
pthread_mutex_t copy_table_mu;

pcre *exclude;
pcre_extra *exclude_extra;
@@ -75,6 +79,7 @@ typedef struct {

typedef struct {
char *es_url;
es_version_t *es_version;
char *es_index;
int batch_size;
tpool_t *pool;
@@ -82,10 +87,15 @@ typedef struct {
GHashTable *tags;
store_t *meta_store;
GHashTable *meta;
/**
 * Set to false when using --print
 */
int needs_es_connection;
} IndexCtx_t;

typedef struct {
char *es_url;
es_version_t *es_version;
char *es_index;
int index_count;
char *auth_user;
@@ -94,7 +104,7 @@ typedef struct {
int tag_auth_enabled;
char *tagline;
struct index_t indices[256];
char lang[3];
char lang[10];
int dev;
} WebCtx_t;
@@ -15,19 +15,34 @@ typedef struct es_indexer {
} es_indexer_t;

static __thread es_indexer_t *Indexer;
static __thread es_indexer_t *Indexer = NULL;

void delete_queue(int max);
void free_queue(int max);

void elastic_flush();

void elastic_cleanup() {
elastic_flush();
if (Indexer != NULL) {
free(Indexer->es_index);
free(Indexer->es_url);
free(Indexer);
void destroy_indexer(es_indexer_t *indexer) {

if (indexer == NULL) {
return;
}

LOG_DEBUG("elastic.c", "Destroying indexer")

if (indexer->es_url != NULL) {
free(indexer->es_url);
free(indexer->es_index);
}

free(indexer);
}

void elastic_cleanup() {
if (IndexCtx.needs_es_connection) {
elastic_flush();
}

destroy_indexer(Indexer);
}

void print_json(cJSON *document, const char id_str[MD5_STR_LENGTH]) {
@@ -52,11 +67,22 @@ void index_json_func(void *arg) {
elastic_index_line(line);
}

void delete_document(const char* document_id_str, void* UNUSED(_data)) {
es_bulk_line_t *bulk_line = malloc(sizeof(es_bulk_line_t));
bulk_line->type = ES_BULK_LINE_DELETE;
bulk_line->next = NULL;

memcpy(bulk_line->path_md5_str, document_id_str, MD5_STR_LENGTH);
tpool_add_work(IndexCtx.pool, index_json_func, bulk_line);
}

void index_json(cJSON *document, const char index_id_str[MD5_STR_LENGTH]) {
char *json = cJSON_PrintUnformatted(document);

size_t json_len = strlen(json);
es_bulk_line_t *bulk_line = malloc(sizeof(es_bulk_line_t) + json_len + 2);
bulk_line->type = ES_BULK_LINE_INDEX;
memcpy(bulk_line->line, json, json_len);
memcpy(bulk_line->path_md5_str, index_id_str, MD5_STR_LENGTH);
*(bulk_line->line + json_len) = '\n';
@@ -125,9 +151,19 @@ void *create_bulk_buffer(int max, int *count, size_t *buf_len) {
size_t buf_cur = 0;
char *buf = malloc(8192);
size_t buf_capacity = 8192;
#define GROW_BUF(delta) \
while (buf_size + (delta) > buf_capacity) { \
buf_capacity *= 2; \
buf = realloc(buf, buf_capacity); \
} \
buf_size += (delta); \

// see: https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
// ES_BULK_LINE_INDEX: two lines, 1st action, 2nd content
// ES_BULK_LINE_DELETE: one line
while (line != NULL && *count < max) {
char action_str[256];
if (line->type == ES_BULK_LINE_INDEX) {
snprintf(
action_str, sizeof(action_str),
"{\"index\":{\"_id\":\"%s\",\"_type\":\"_doc\",\"_index\":\"%s\"}}\n",
@@ -137,18 +173,25 @@ void *create_bulk_buffer(int max, int *count, size_t *buf_len) {
size_t action_str_len = strlen(action_str);
size_t line_len = strlen(line->line);

while (buf_size + line_len + action_str_len > buf_capacity) {
buf_capacity *= 2;
buf = realloc(buf, buf_capacity);
}

buf_size += line_len + action_str_len;
GROW_BUF(action_str_len + line_len);

memcpy(buf + buf_cur, action_str, action_str_len);
buf_cur += action_str_len;
memcpy(buf + buf_cur, line->line, line_len);
buf_cur += line_len;

} else if (line->type == ES_BULK_LINE_DELETE) {
snprintf(
action_str, sizeof(action_str),
"{\"delete\":{\"_id\":\"%s\",\"_index\":\"%s\"}}\n",
line->path_md5_str, Indexer->es_index
);

size_t action_str_len = strlen(action_str);
GROW_BUF(action_str_len);
memcpy(buf + buf_cur, action_str, action_str_len);
buf_cur += action_str_len;
}
line = line->next;
(*count)++;
}
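For reference, the buffer assembled above is a standard Elasticsearch _bulk payload. With made-up document IDs and the default index name, it would look roughly like this (one action line per queued item, plus the document source for index actions):

    {"index":{"_id":"16fed6b4292a93d25e680ae932b574e8","_type":"_doc","_index":"sist2"}}
    {"name":"example","extension":"txt","size":123}
    {"delete":{"_id":"a3b4c00b8ed0340b42219231c962edbd","_index":"sist2"}}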
@@ -223,7 +266,7 @@ void _elastic_flush(int max) {
LOG_ERRORF("elastic.c", "Single document too large, giving up: {%s}", Indexer->line_head->path_md5_str)
free_response(r);
free(buf);
delete_queue(1);
free_queue(1);
if (Indexer->queued != 0) {
elastic_flush();
}
@@ -248,13 +291,13 @@ void _elastic_flush(int max) {

} else if (r->status_code != 200) {
print_errors(r);
delete_queue(Indexer->queued);
free_queue(Indexer->queued);

} else {

print_errors(r);
LOG_INFOF("elastic.c", "Indexed %d documents (%zukB) <%d>", count, buf_len / 1024, r->status_code);
delete_queue(max);
LOG_DEBUGF("elastic.c", "Indexed %d documents (%zukB) <%d>", count, buf_len / 1024, r->status_code);
free_queue(max);

if (Indexer->queued != 0) {
elastic_flush();
@@ -265,7 +308,7 @@ void _elastic_flush(int max) {
free(buf);
}

void delete_queue(int max) {
void free_queue(int max) {
for (int i = 0; i < max; i++) {
es_bulk_line_t *tmp = Indexer->line_head;
Indexer->line_head = tmp->next;
@@ -309,16 +352,22 @@ void elastic_index_line(es_bulk_line_t *line) {

es_indexer_t *create_indexer(const char *url, const char *index) {

es_indexer_t *indexer = malloc(sizeof(es_indexer_t));

if (IndexCtx.needs_es_connection) {
char *es_url = malloc(strlen(url) + 1);
strcpy(es_url, url);

char *es_index = malloc(strlen(index) + 1);
strcpy(es_index, index);

es_indexer_t *indexer = malloc(sizeof(es_indexer_t));

indexer->es_url = es_url;
indexer->es_index = es_index;
} else {
indexer->es_url = NULL;
indexer->es_index = NULL;
}

indexer->queued = 0;
indexer->line_head = NULL;
indexer->line_tail = NULL;
@@ -356,8 +405,66 @@ void finish_indexer(char *script, int async_script, char *index_id) {
free_response(r);
}

es_version_t *elastic_get_version(const char *es_url) {
response_t *r = web_get(es_url, 30);

char *tmp = malloc(r->size + 1);
memcpy(tmp, r->body, r->size);
*(tmp + r->size) = '\0';
cJSON *response = cJSON_Parse(tmp);
free(tmp);
free_response(r);

if (response == NULL) {
return NULL;
}

if (cJSON_GetObjectItem(response, "version") == NULL ||
cJSON_GetObjectItem(cJSON_GetObjectItem(response, "version"), "number") == NULL) {
cJSON_Delete(response);
return NULL;
}

char *version_str = cJSON_GetObjectItem(cJSON_GetObjectItem(response, "version"), "number")->valuestring;

es_version_t *version = malloc(sizeof(es_version_t));

const char *tok = strtok(version_str, ".");
version->major = atoi(tok);
tok = strtok(NULL, ".");
version->minor = atoi(tok);
tok = strtok(NULL, ".");
version->patch = atoi(tok);

cJSON_Delete(response);

return version;
}

void elastic_init(int force_reset, const char *user_mappings, const char *user_settings) {

es_version_t *es_version = elastic_get_version(IndexCtx.es_url);
IndexCtx.es_version = es_version;

if (es_version == NULL) {
LOG_FATAL("elastic.c", "Could not get ES version")
}

LOG_INFOF("elastic.c",
"Elasticsearch version is %s (supported=%d, legacy=%d)",
format_es_version(es_version), IS_SUPPORTED_ES_VERSION(es_version), USE_LEGACY_ES_SETTINGS(es_version));

if (!IS_SUPPORTED_ES_VERSION(es_version)) {
LOG_FATAL("elastic.c", "sist2 only supports Elasticsearch v6.8 or newer")
}

char *settings = NULL;
if (USE_LEGACY_ES_SETTINGS(es_version)) {
settings = settings_json;
} else {
settings = settings_legacy_json;
}

// Check if index exists
char url[4096];
snprintf(url, sizeof(url), "%s/%s", IndexCtx.es_url, IndexCtx.es_index);
@@ -392,7 +499,7 @@ void elastic_init(int force_reset, const char* user_mappings, const char* user_s
free_response(r);

snprintf(url, sizeof(url), "%s/%s/_settings", IndexCtx.es_url, IndexCtx.es_index);
r = web_put(url, user_settings ? user_settings : settings_json);
r = web_put(url, user_settings ? user_settings : settings);
LOG_INFOF("elastic.c", "Update ES settings <%d>", r->status_code);
if (r->status_code != 200) {
print_error(r);
@@ -3,12 +3,36 @@

#include "src/sist.h"

#define ES_BULK_LINE_INDEX 0
#define ES_BULK_LINE_DELETE 1

typedef struct es_bulk_line {
struct es_bulk_line *next;
char path_md5_str[MD5_STR_LENGTH];
int type;
char line[0];
} es_bulk_line_t;

typedef struct {
int major;
int minor;
int patch;
} es_version_t;

#define VERSION_GE(version, maj, min) ((version)->major > (maj) || ((version)->major == (maj) && (version)->minor >= (min)))
#define IS_SUPPORTED_ES_VERSION(es_version) VERSION_GE((es_version), 6, 8)
#define USE_LEGACY_ES_SETTINGS(es_version) (!VERSION_GE((es_version), 7, 14))

__always_inline
static const char *format_es_version(es_version_t *version) {
static char buf[64];

snprintf(buf, sizeof(buf), "%d.%d.%d", version->major, version->minor, version->patch);

return buf;
}
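A minimal sketch of how these version helpers combine (the URL and the mapping of the two settings payloads are assumptions here; see the elastic_init hunk above for the committed flow):

    es_version_t *version = elastic_get_version("http://localhost:9200"); // assumed default URL
    if (version == NULL || !IS_SUPPORTED_ES_VERSION(version)) {
        LOG_FATAL("elastic.c", "sist2 only supports Elasticsearch v6.8 or newer")
    }
    LOG_INFOF("elastic.c", "Elasticsearch version is %s", format_es_version(version));
    // Assumption: pre-7.14 clusters receive the *_legacy settings payload
    const char *settings = USE_LEGACY_ES_SETTINGS(version) ? settings_legacy_json : settings_json;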

/**
 * Note: indexer is *not* thread safe
 */
@@ -20,6 +44,8 @@ void print_json(cJSON *document, const char index_id_str[MD5_STR_LENGTH]);

void index_json(cJSON *document, const char index_id_str[MD5_STR_LENGTH]);

void delete_document(const char *document_id_str, void* data);

es_indexer_t *create_indexer(const char *url, const char *index);

void elastic_cleanup();
@@ -31,6 +57,8 @@ cJSON *elastic_get_document(const char *id_str);

char *elastic_get_status();

es_version_t *elastic_get_version(const char *es_url);

void execute_update_script(const char *script, int async, const char index_id[MD5_STR_LENGTH]);

#endif
src/index/static_generated.c (vendored, 3 changed lines): file diff suppressed because one or more lines are too long.
@@ -38,6 +38,8 @@ char *get_meta_key_text(enum metakey meta_key) {
|
||||
return "parent";
|
||||
case MetaExifMake:
|
||||
return "exif_make";
|
||||
case MetaExifDescription:
|
||||
return "exif_description";
|
||||
case MetaExifSoftware:
|
||||
return "exif_software";
|
||||
case MetaExifExposureTime:
|
||||
@@ -131,6 +133,7 @@ char *build_json_string(document_t *doc) {
|
||||
while (meta != NULL) {
|
||||
|
||||
switch (meta->key) {
|
||||
case MetaThumbnail:
|
||||
case MetaPages:
|
||||
case MetaWidth:
|
||||
case MetaHeight:
|
||||
@@ -150,6 +153,7 @@ char *build_json_string(document_t *doc) {
|
||||
case MetaFontName:
|
||||
case MetaParent:
|
||||
case MetaExifMake:
|
||||
case MetaExifDescription:
|
||||
case MetaExifSoftware:
|
||||
case MetaExifExposureTime:
|
||||
case MetaExifFNumber:
|
||||
@@ -160,7 +164,6 @@ char *build_json_string(document_t *doc) {
|
||||
case MetaExifModel:
|
||||
case MetaAuthor:
|
||||
case MetaModifiedBy:
|
||||
case MetaThumbnail:
|
||||
case MetaExifGpsLongitudeDMS:
|
||||
case MetaExifGpsLongitudeDec:
|
||||
case MetaExifGpsLongitudeRef:
|
||||
@@ -395,7 +398,7 @@ void read_index_bin_handle_line(const char *line, const char *index_id, index_fu
|
||||
}
|
||||
}
|
||||
|
||||
void read_index_ndjson(const char *path, const char *index_id, index_func func) {
|
||||
void read_lines(const char *path, const line_processor_t processor) {
|
||||
dyn_buffer_t buf = dyn_buffer_create();
|
||||
|
||||
// Initialize zstd things
|
||||
@@ -424,7 +427,7 @@ void read_index_ndjson(const char *path, const char *index_id, index_func func)
|
||||
|
||||
if (c == '\n') {
|
||||
dyn_buffer_write_char(&buf, '\0');
|
||||
read_index_bin_handle_line(buf.buf, index_id, func);
|
||||
processor.func(buf.buf, processor.data);
|
||||
buf.cur = 0;
|
||||
} else {
|
||||
dyn_buffer_write_char(&buf, c);
|
||||
@@ -449,12 +452,22 @@ void read_index_ndjson(const char *path, const char *index_id, index_func func)
|
||||
|
||||
dyn_buffer_destroy(&buf);
|
||||
fclose(file);
|
||||
|
||||
}
|
||||
|
||||
void read_index_ndjson(const char *line, void* _data) {
|
||||
void** data = _data;
|
||||
const char* index_id = data[0];
|
||||
index_func func = data[1];
|
||||
read_index_bin_handle_line(line, index_id, func);
|
||||
}
|
||||
|
||||
void read_index(const char *path, const char index_id[MD5_STR_LENGTH], const char *type, index_func func) {
|
||||
|
||||
if (strcmp(type, INDEX_TYPE_NDJSON) == 0) {
|
||||
read_index_ndjson(path, index_id, func);
|
||||
read_lines(path, (line_processor_t) {
|
||||
.data = (void*[2]){(void*)index_id, func} ,
|
||||
.func = read_index_ndjson,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -473,6 +486,7 @@ void incremental_read(GHashTable *table, const char *filepath, index_descriptor_
|
||||
}
|
||||
|
||||
static __thread GHashTable *IncrementalCopyTable = NULL;
|
||||
static __thread GHashTable *IncrementalNewTable = NULL;
|
||||
static __thread store_t *IncrementalCopySourceStore = NULL;
|
||||
static __thread store_t *IncrementalCopyDestinationStore = NULL;
|
||||
|
||||
@@ -521,3 +535,33 @@ void incremental_copy(store_t *store, store_t *dst_store, const char *filepath,
|
||||
|
||||
read_index(filepath, "", INDEX_TYPE_NDJSON, incremental_copy_handle_doc);
|
||||
}
|
||||
|
||||
void incremental_delete_handle_doc(cJSON *document, UNUSED(const char id_str[MD5_STR_LENGTH])) {
|
||||
|
||||
char path_md5_n[MD5_STR_LENGTH + 1];
|
||||
path_md5_n[MD5_STR_LENGTH] = '\0';
|
||||
path_md5_n[MD5_STR_LENGTH - 1] = '\n';
|
||||
const char *path_md5_str = cJSON_GetObjectItem(document, "_id")->valuestring;
|
||||
|
||||
// do not delete archive virtual entries
|
||||
if (cJSON_GetObjectItem(document, "parent") == NULL
|
||||
&& !incremental_get_str(IncrementalCopyTable, path_md5_str)
|
||||
&& !incremental_get_str(IncrementalNewTable, path_md5_str)
|
||||
) {
|
||||
memcpy(path_md5_n, path_md5_str, MD5_STR_LENGTH - 1);
|
||||
zstd_write_string(path_md5_n, MD5_STR_LENGTH);
|
||||
}
|
||||
}
|
||||
|
||||
void incremental_delete(const char *del_filepath, const char* index_filepath,
|
||||
GHashTable *copy_table, GHashTable *new_table) {
|
||||
|
||||
if (WriterCtx.out_file == NULL) {
|
||||
initialize_writer_ctx(del_filepath);
|
||||
}
|
||||
|
||||
IncrementalCopyTable = copy_table;
|
||||
IncrementalNewTable = new_table;
|
||||
|
||||
read_index(index_filepath, "", INDEX_TYPE_NDJSON, incremental_delete_handle_doc);
|
||||
}
|
||||
|
||||
@@ -7,13 +7,23 @@
|
||||
#include <sys/syscall.h>
|
||||
#include <glib.h>
|
||||
|
||||
typedef struct line_processor {
|
||||
void* data;
|
||||
void (*func)(const char*, void*);
|
||||
} line_processor_t;
|
||||
|
||||
typedef void(*index_func)(cJSON *, const char[MD5_STR_LENGTH]);
|
||||
|
||||
void incremental_copy(store_t *store, store_t *dst_store, const char *filepath,
|
||||
const char *dst_filepath, GHashTable *copy_table);
|
||||
|
||||
void incremental_delete(const char *del_filepath, const char* index_filepath,
|
||||
GHashTable *copy_table, GHashTable *new_table);
|
||||
|
||||
void write_document(document_t *doc);
|
||||
|
||||
void read_lines(const char *path, const line_processor_t processor);
|
||||
|
||||
void read_index(const char *path, const char[MD5_STR_LENGTH], const char *type, index_func);
|
||||
|
||||
void incremental_read(GHashTable *table, const char *filepath, index_descriptor_t *desc);
|
||||
@@ -29,4 +39,18 @@ void write_index_descriptor(char *path, index_descriptor_t *desc);
|
||||
|
||||
index_descriptor_t read_index_descriptor(char *path);
|
||||
|
||||
// caller ensures char file_path[PATH_MAX]
|
||||
#define READ_INDICES(file_path, index_path, action_ok, action_main_fail, cond_original) \
|
||||
snprintf(file_path, PATH_MAX, "%s_index_main.ndjson.zst", index_path); \
|
||||
if (0 == access(file_path, R_OK)) { \
|
||||
action_ok; \
|
||||
} else { \
|
||||
action_main_fail; \
|
||||
} \
|
||||
snprintf(file_path, PATH_MAX, "%s_index_original.ndjson.zst", index_path); \
|
||||
if ((cond_original) && (0 == access(file_path, R_OK))) { \
|
||||
action_ok; \
|
||||
} \
|
||||
|
||||
|
||||
#endif
|
||||
@@ -23,7 +23,6 @@ store_t *store_create(const char *path, size_t chunk_size) {
}

store->size = (size_t) store->chunk_size;
ScanCtx.stat_tn_size = 0;
mdb_env_set_mapsize(store->env, store->size);

// Open dbi
@@ -56,7 +55,16 @@ void store_write(store_t *store, char *key, size_t key_len, char *buf, size_t bu
if (key_len == MD5_DIGEST_LENGTH) {
char path_md5_str[MD5_STR_LENGTH];
buf2hex((unsigned char *) key, MD5_DIGEST_LENGTH, path_md5_str);

LOG_DEBUGF("store.c", "Store write {%s} %lu bytes", path_md5_str, buf_len)

} else if (key_len == MD5_DIGEST_LENGTH + sizeof(int)) {
char path_md5_str[MD5_STR_LENGTH];
buf2hex((unsigned char *) key, MD5_DIGEST_LENGTH, path_md5_str);

LOG_DEBUGF("store.c", "Store write {%s/%d} %lu bytes",
path_md5_str, *(int *) (key + MD5_DIGEST_LENGTH), buf_len);

} else {
LOG_DEBUGF("store.c", "Store write {%s} %lu bytes", key, buf_len)
}
@@ -4,6 +4,8 @@
|
||||
|
||||
#include <ftw.h>
|
||||
|
||||
#define STR_STARTS_WITH(x, y) (strncmp(y, x, strlen(y) - 1) == 0)
|
||||
|
||||
__always_inline
|
||||
parse_job_t *create_fs_parse_job(const char *filepath, const struct stat *info, int base) {
|
||||
int len = (int) strlen(filepath);
|
||||
@@ -43,26 +45,91 @@ int sub_strings[30];
|
||||
|
||||
int handle_entry(const char *filepath, const struct stat *info, int typeflag, struct FTW *ftw) {
|
||||
|
||||
if (typeflag == FTW_F && S_ISREG(info->st_mode) && ftw->level <= ScanCtx.depth) {
|
||||
if (ftw->level > ScanCtx.depth) {
|
||||
if (typeflag == FTW_D) {
|
||||
return FTW_SKIP_SUBTREE;
|
||||
}
|
||||
return FTW_CONTINUE;
|
||||
}
|
||||
|
||||
if (ScanCtx.exclude != NULL && EXCLUDED(filepath)) {
|
||||
LOG_DEBUGF("walk.c", "Excluded: %s", filepath)
|
||||
|
||||
if (typeflag == FTW_F && S_ISREG(info->st_mode)) {
|
||||
pthread_mutex_lock(&ScanCtx.dbg_file_counts_mu);
|
||||
ScanCtx.dbg_excluded_files_count += 1;
|
||||
pthread_mutex_unlock(&ScanCtx.dbg_file_counts_mu);
|
||||
return 0;
|
||||
} else if (typeflag == FTW_D) {
|
||||
return FTW_SKIP_SUBTREE;
|
||||
}
|
||||
|
||||
return FTW_CONTINUE;
|
||||
}
|
||||
|
||||
if (typeflag == FTW_F && S_ISREG(info->st_mode)) {
|
||||
parse_job_t *job = create_fs_parse_job(filepath, info, ftw->base);
|
||||
tpool_add_work(ScanCtx.pool, parse, job);
|
||||
}
|
||||
|
||||
return 0;
|
||||
return FTW_CONTINUE;
|
||||
}
|
||||
|
||||
#define MAX_FILE_DESCRIPTORS 64
|
||||
|
||||
int walk_directory_tree(const char *dirpath) {
|
||||
return nftw(dirpath, handle_entry, MAX_FILE_DESCRIPTORS, FTW_PHYS | FTW_DEPTH);
|
||||
return nftw(dirpath, handle_entry, MAX_FILE_DESCRIPTORS, FTW_PHYS | FTW_ACTIONRETVAL);
|
||||
}
|
||||
|
||||
int iterate_file_list(void *input_file) {
|
||||
|
||||
char buf[PATH_MAX];
|
||||
struct stat info;
|
||||
|
||||
while (fgets(buf, sizeof(buf), input_file) != NULL) {
|
||||
|
||||
// Remove trailing newline
|
||||
*(buf + strlen(buf) - 1) = '\0';
|
||||
|
||||
int stat_ret = stat(buf, &info);
|
||||
|
||||
if (stat_ret != 0) {
|
||||
LOG_ERRORF("walk.c", "Could not stat file %s (%s)", buf, strerror(errno));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!S_ISREG(info.st_mode)) {
|
||||
LOG_ERRORF("walk.c", "Is not a regular file: %s", buf);
|
||||
continue;
|
||||
}
|
||||
|
||||
char *absolute_path = canonicalize_file_name(buf);
|
||||
|
||||
if (absolute_path == NULL) {
|
||||
LOG_FATALF("walk.c", "FIXME: Could not get absolute path of %s", buf);
|
||||
}
|
||||
|
||||
if (ScanCtx.exclude != NULL && EXCLUDED(absolute_path)) {
|
||||
LOG_DEBUGF("walk.c", "Excluded: %s", absolute_path)
|
||||
|
||||
if (S_ISREG(info.st_mode)) {
|
||||
pthread_mutex_lock(&ScanCtx.dbg_file_counts_mu);
|
||||
ScanCtx.dbg_excluded_files_count += 1;
|
||||
pthread_mutex_unlock(&ScanCtx.dbg_file_counts_mu);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!STR_STARTS_WITH(absolute_path, ScanCtx.index.desc.root)) {
|
||||
LOG_FATALF("walk.c", "File is not a children of root folder (%s): %s", ScanCtx.index.desc.root, buf);
|
||||
}
|
||||
|
||||
int base = (int) (strrchr(buf, '/') - buf) + 1;
|
||||
|
||||
parse_job_t *job = create_fs_parse_job(absolute_path, &info, base);
|
||||
free(absolute_path);
|
||||
tpool_add_work(ScanCtx.pool, parse, job);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -5,4 +5,6 @@
|
||||
|
||||
int walk_directory_tree(const char *);
|
||||
|
||||
int iterate_file_list(void* input_file);
|
||||
|
||||
#endif
|
||||
|
||||
src/log.c (26 changed lines)
@@ -48,6 +48,12 @@ void vsist_logf(const char *filepath, int level, char *format, va_list ap) {
|
||||
size_t maxsize = sizeof(log_str) - log_len;
|
||||
log_len += vsnprintf(log_str + log_len, maxsize, format, ap);
|
||||
|
||||
if (log_len >= maxsize) {
|
||||
fprintf(stderr, "([%s] FIXME: Log string is too long to display: %dB)\n",
|
||||
log_levels[level], log_len);
|
||||
return;
|
||||
}
|
||||
|
||||
if (is_tty) {
|
||||
log_len += sprintf(log_str + log_len, "\033[0m\n");
|
||||
} else {
|
||||
@@ -55,10 +61,14 @@ void vsist_logf(const char *filepath, int level, char *format, va_list ap) {
|
||||
log_len += 1;
|
||||
}
|
||||
|
||||
int ret = write(STDERR_FILENO, log_str, log_len);
|
||||
if (ret == -1) {
|
||||
LOG_FATALF("serialize.c", "Could not write index descriptor: %s", strerror(errno))
|
||||
if (PrintingProgressBar) {
|
||||
PrintingProgressBar = FALSE;
|
||||
memmove(log_str + 1, log_str, log_len);
|
||||
log_str[0] = '\n';
|
||||
log_len += 1;
|
||||
}
|
||||
|
||||
write(STDERR_FILENO, log_str, log_len);
|
||||
}
|
||||
|
||||
void sist_logf(const char *filepath, int level, char *format, ...) {
|
||||
@@ -104,8 +114,12 @@ void sist_log(const char *filepath, int level, char *str) {
|
||||
);
|
||||
}
|
||||
|
||||
int ret = write(STDERR_FILENO, log_str, log_len);
|
||||
if (ret == -1) {
|
||||
LOG_FATALF("serialize.c", "Could not write index descriptor: %s", strerror(errno));
|
||||
if (PrintingProgressBar) {
|
||||
PrintingProgressBar = FALSE;
|
||||
memmove(log_str + 1, log_str, log_len);
|
||||
log_str[0] = '\n';
|
||||
log_len += 1;
|
||||
}
|
||||
|
||||
write(STDERR_FILENO, log_str, log_len);
|
||||
}
|
||||
|
||||
src/main.c (271 changed lines)
@@ -14,6 +14,9 @@
|
||||
#include "parsing/mime.h"
|
||||
#include "parsing/parse.h"
|
||||
|
||||
#include <signal.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include "stats.h"
|
||||
|
||||
#define DESCRIPTION "Lightning-fast file system indexer and search tool."
|
||||
@@ -29,8 +32,6 @@ static const char *const usage[] = {
|
||||
NULL,
|
||||
};
|
||||
|
||||
#include<signal.h>
|
||||
#include<unistd.h>
|
||||
|
||||
static __sighandler_t sigsegv_handler = NULL;
|
||||
static __sighandler_t sigabrt_handler = NULL;
|
||||
@@ -169,6 +170,7 @@ void initialize_scan_context(scan_args_t *args) {
|
||||
ScanCtx.dbg_current_files = g_hash_table_new_full(g_int64_hash, g_int64_equal, NULL, NULL);
|
||||
pthread_mutex_init(&ScanCtx.dbg_current_files_mu, NULL);
|
||||
pthread_mutex_init(&ScanCtx.dbg_file_counts_mu, NULL);
|
||||
pthread_mutex_init(&ScanCtx.copy_table_mu, NULL);
|
||||
|
||||
ScanCtx.calculate_checksums = args->calculate_checksums;
|
||||
|
||||
@@ -187,40 +189,50 @@ void initialize_scan_context(scan_args_t *args) {
|
||||
ScanCtx.comic_ctx.log = _log;
|
||||
ScanCtx.comic_ctx.logf = _logf;
|
||||
ScanCtx.comic_ctx.store = _store;
|
||||
ScanCtx.comic_ctx.tn_size = args->size;
|
||||
ScanCtx.comic_ctx.tn_qscale = args->quality;
|
||||
ScanCtx.comic_ctx.enable_tn = args->tn_count > 0;
|
||||
ScanCtx.comic_ctx.tn_size = args->tn_size;
|
||||
ScanCtx.comic_ctx.tn_qscale = args->tn_quality;
|
||||
ScanCtx.comic_ctx.cbr_mime = mime_get_mime_by_string(ScanCtx.mime_table, "application/x-cbr");
|
||||
ScanCtx.comic_ctx.cbz_mime = mime_get_mime_by_string(ScanCtx.mime_table, "application/x-cbz");
|
||||
|
||||
// Ebook
|
||||
pthread_mutex_init(&ScanCtx.ebook_ctx.mupdf_mutex, NULL);
|
||||
ScanCtx.ebook_ctx.content_size = args->content_size;
|
||||
ScanCtx.ebook_ctx.tn_size = args->size;
|
||||
ScanCtx.ebook_ctx.enable_tn = args->tn_count > 0;
|
||||
ScanCtx.ebook_ctx.tn_size = args->tn_size;
|
||||
ScanCtx.ebook_ctx.tesseract_lang = args->tesseract_lang;
|
||||
ScanCtx.ebook_ctx.tesseract_path = args->tesseract_path;
|
||||
ScanCtx.ebook_ctx.log = _log;
|
||||
ScanCtx.ebook_ctx.logf = _logf;
|
||||
ScanCtx.ebook_ctx.store = _store;
|
||||
ScanCtx.ebook_ctx.fast_epub_parse = args->fast_epub;
|
||||
ScanCtx.ebook_ctx.tn_qscale = args->quality;
|
||||
ScanCtx.ebook_ctx.tn_qscale = args->tn_quality;
|
||||
|
||||
// Font
|
||||
ScanCtx.font_ctx.enable_tn = args->size > 0;
|
||||
ScanCtx.font_ctx.enable_tn = args->tn_count > 0;
|
||||
ScanCtx.font_ctx.log = _log;
|
||||
ScanCtx.font_ctx.logf = _logf;
|
||||
ScanCtx.font_ctx.store = _store;
|
||||
|
||||
// Media
|
||||
ScanCtx.media_ctx.tn_qscale = args->quality;
|
||||
ScanCtx.media_ctx.tn_size = args->size;
|
||||
ScanCtx.media_ctx.tn_qscale = args->tn_quality;
|
||||
ScanCtx.media_ctx.tn_size = args->tn_size;
|
||||
ScanCtx.media_ctx.tn_count = args->tn_count;
|
||||
ScanCtx.media_ctx.log = _log;
|
||||
ScanCtx.media_ctx.logf = _logf;
|
||||
ScanCtx.media_ctx.store = _store;
|
||||
ScanCtx.media_ctx.max_media_buffer = (long) args->max_memory_buffer * 1024 * 1024;
|
||||
ScanCtx.media_ctx.max_media_buffer = (long) args->max_memory_buffer_mib * 1024 * 1024;
|
||||
ScanCtx.media_ctx.read_subtitles = args->read_subtitles;
|
||||
ScanCtx.media_ctx.read_subtitles = args->tn_count;
|
||||
|
||||
if (args->ocr_images) {
|
||||
ScanCtx.media_ctx.tesseract_lang = args->tesseract_lang;
|
||||
ScanCtx.media_ctx.tesseract_path = args->tesseract_path;
|
||||
}
|
||||
init_media();
|
||||
|
||||
// OOXML
|
||||
ScanCtx.ooxml_ctx.enable_tn = args->tn_count > 0;
|
||||
ScanCtx.ooxml_ctx.content_size = args->content_size;
|
||||
ScanCtx.ooxml_ctx.log = _log;
|
||||
ScanCtx.ooxml_ctx.logf = _logf;
|
||||
@@ -237,7 +249,8 @@ void initialize_scan_context(scan_args_t *args) {
|
||||
ScanCtx.text_ctx.logf = _logf;
|
||||
|
||||
// MSDOC
|
||||
ScanCtx.msdoc_ctx.tn_size = args->size;
|
||||
ScanCtx.msdoc_ctx.enable_tn = args->tn_count > 0;
|
||||
ScanCtx.msdoc_ctx.tn_size = args->tn_size;
|
||||
ScanCtx.msdoc_ctx.content_size = args->content_size;
|
||||
ScanCtx.msdoc_ctx.log = _log;
|
||||
ScanCtx.msdoc_ctx.logf = _logf;
|
||||
@@ -246,6 +259,7 @@ void initialize_scan_context(scan_args_t *args) {
|
||||
|
||||
ScanCtx.threads = args->threads;
|
||||
ScanCtx.depth = args->depth;
|
||||
ScanCtx.mem_limit = (size_t) args->scan_mem_limit_mib * 1024 * 1024;
|
||||
|
||||
strncpy(ScanCtx.index.path, args->output, sizeof(ScanCtx.index.path));
|
||||
strncpy(ScanCtx.index.desc.name, args->name, sizeof(ScanCtx.index.desc.name));
|
||||
@@ -255,8 +269,9 @@ void initialize_scan_context(scan_args_t *args) {
|
||||
ScanCtx.fast = args->fast;
|
||||
|
||||
// Raw
|
||||
ScanCtx.raw_ctx.tn_qscale = args->quality;
|
||||
ScanCtx.raw_ctx.tn_size = args->size;
|
||||
ScanCtx.raw_ctx.tn_qscale = args->tn_quality;
|
||||
ScanCtx.raw_ctx.enable_tn = args->tn_count > 0;
|
||||
ScanCtx.raw_ctx.tn_size = args->tn_size;
|
||||
ScanCtx.raw_ctx.log = _log;
|
||||
ScanCtx.raw_ctx.logf = _logf;
|
||||
ScanCtx.raw_ctx.store = _store;
|
||||
@@ -275,37 +290,87 @@ void initialize_scan_context(scan_args_t *args) {
|
||||
ScanCtx.json_ctx.ndjson_mime = mime_get_mime_by_string(ScanCtx.mime_table, "application/ndjson");
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Loads an existing index as the baseline for incremental scanning.
|
||||
* 1. load old index files (original+main) => original_table
|
||||
* 2. allocate empty table => copy_table
|
||||
* 3. allocate empty table => new_table
|
||||
* the original_table/copy_table/new_table will be populated in parsing/parse.c:parse
|
||||
* and consumed in main.c:save_incremental_index
|
||||
*
|
||||
* Note: the existing index may or may not be of incremental index form.
|
||||
*/
|
||||
void load_incremental_index(const scan_args_t *args) {
|
||||
char file_path[PATH_MAX];
|
||||
|
||||
ScanCtx.original_table = incremental_get_table();
|
||||
ScanCtx.copy_table = incremental_get_table();
|
||||
|
||||
DIR *dir = opendir(args->incremental);
|
||||
if (dir == NULL) {
|
||||
LOG_FATALF("main.c", "Could not open original index for incremental scan: %s", strerror(errno))
|
||||
}
|
||||
ScanCtx.new_table = incremental_get_table();
|
||||
|
||||
char descriptor_path[PATH_MAX];
|
||||
snprintf(descriptor_path, PATH_MAX, "%s/descriptor.json", args->incremental);
|
||||
snprintf(descriptor_path, PATH_MAX, "%sdescriptor.json", args->incremental);
|
||||
index_descriptor_t original_desc = read_index_descriptor(descriptor_path);
|
||||
|
||||
if (strcmp(original_desc.version, Version) != 0) {
|
||||
LOG_FATALF("main.c", "Version mismatch! Index is %s but executable is %s", original_desc.version, Version)
|
||||
}
|
||||
|
||||
struct dirent *de;
|
||||
while ((de = readdir(dir)) != NULL) {
|
||||
if (strncmp(de->d_name, "_index", sizeof("_index") - 1) == 0) {
|
||||
char file_path[PATH_MAX];
|
||||
snprintf(file_path, PATH_MAX, "%s%s", args->incremental, de->d_name);
|
||||
incremental_read(ScanCtx.original_table, file_path, &original_desc);
|
||||
}
|
||||
}
|
||||
closedir(dir);
|
||||
READ_INDICES(file_path, args->incremental, incremental_read(ScanCtx.original_table, file_path, &original_desc),
|
||||
LOG_FATALF("main.c", "Could not open original main index for incremental scan: %s", strerror(errno)),
|
||||
1);
|
||||
|
||||
LOG_INFOF("main.c", "Loaded %d items in to mtime table.", g_hash_table_size(ScanCtx.original_table))
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves an incremental index.
|
||||
* Before calling this function, the scanner should have finished writing the main index.
|
||||
* 1. Build original_table - new_table => delete_table
|
||||
* 2. Incrementally copy from old index files [(original+main) /\ copy_table] => index_original.ndjson.zst & store
|
||||
*/
|
||||
void save_incremental_index(scan_args_t *args) {
|
||||
char dst_path[PATH_MAX];
|
||||
char store_path[PATH_MAX];
|
||||
char file_path[PATH_MAX];
|
||||
char del_path[PATH_MAX];
|
||||
snprintf(store_path, PATH_MAX, "%sthumbs", args->incremental);
|
||||
snprintf(dst_path, PATH_MAX, "%s_index_original.ndjson.zst", ScanCtx.index.path);
|
||||
store_t *source = store_create(store_path, STORE_SIZE_TN);
|
||||
|
||||
LOG_INFOF("main.c", "incremental_delete: original size = %u, copy size = %u, new size = %u",
|
||||
g_hash_table_size(ScanCtx.original_table),
|
||||
g_hash_table_size(ScanCtx.copy_table),
|
||||
g_hash_table_size(ScanCtx.new_table));
|
||||
snprintf(del_path, PATH_MAX, "%s_index_delete.list.zst", ScanCtx.index.path);
|
||||
READ_INDICES(file_path, args->incremental,
|
||||
incremental_delete(del_path, file_path, ScanCtx.copy_table, ScanCtx.new_table),
|
||||
perror("incremental_delete"), 1);
|
||||
writer_cleanup();
|
||||
|
||||
READ_INDICES(file_path, args->incremental,
|
||||
incremental_copy(source, ScanCtx.index.store, file_path, dst_path, ScanCtx.copy_table),
|
||||
perror("incremental_copy"), 1);
|
||||
writer_cleanup();
|
||||
|
||||
store_destroy(source);
|
||||
|
||||
snprintf(store_path, PATH_MAX, "%stags", args->incremental);
|
||||
snprintf(dst_path, PATH_MAX, "%stags", ScanCtx.index.path);
|
||||
store_t *source_tags = store_create(store_path, STORE_SIZE_TAG);
|
||||
store_copy(source_tags, dst_path);
|
||||
store_destroy(source_tags);
|
||||
}
|
||||
|
||||
/**
|
||||
* An index can be either incremental or non-incremental (initial index).
|
||||
* For an initial index, there is only the "main" index.
|
||||
* For an incremental index, there are, additionally:
|
||||
* - An "original" index, referencing all files unchanged since the previous index.
|
||||
* - A "delete" index, referencing all files that exist in the previous index, but deleted since then.
|
||||
* Therefore, for an incremental index, "main"+"original" covers all the current files in the live filesystem,
|
||||
* and is orthognal with the "delete" index. When building an incremental index upon an old incremental index,
|
||||
* the old "delete" index can be safely ignored.
|
||||
*/
|
||||
void sist2_scan(scan_args_t *args) {
|
||||
|
||||
ScanCtx.mime_table = mime_get_mime_table();
|
||||
@@ -328,16 +393,26 @@ void sist2_scan(scan_args_t *args) {
|
||||
load_incremental_index(args);
|
||||
}
|
||||
|
||||
ScanCtx.pool = tpool_create(args->threads, thread_cleanup, TRUE, TRUE);
|
||||
ScanCtx.pool = tpool_create(ScanCtx.threads, thread_cleanup, TRUE, TRUE, ScanCtx.mem_limit);
|
||||
tpool_start(ScanCtx.pool);
|
||||
|
||||
ScanCtx.writer_pool = tpool_create(1, writer_cleanup, TRUE, FALSE);
|
||||
ScanCtx.writer_pool = tpool_create(1, writer_cleanup, TRUE, FALSE, 0);
|
||||
tpool_start(ScanCtx.writer_pool);
|
||||
|
||||
if (args->list_path) {
|
||||
// Scan using file list
|
||||
int list_ret = iterate_file_list(args->list_file);
|
||||
if (list_ret != 0) {
|
||||
LOG_FATALF("main.c", "iterate_file_list() failed! (%d)", list_ret)
|
||||
}
|
||||
} else {
|
||||
// Scan directory recursively
|
||||
int walk_ret = walk_directory_tree(ScanCtx.index.desc.root);
|
||||
if (walk_ret == -1) {
|
||||
LOG_FATALF("main.c", "walk_directory_tree() failed! %s (%d)", strerror(errno), errno)
|
||||
}
|
||||
}
|
||||
|
||||
tpool_wait(ScanCtx.pool);
|
||||
tpool_destroy(ScanCtx.pool);
|
||||
|
||||
@@ -347,35 +422,11 @@ void sist2_scan(scan_args_t *args) {
|
||||
LOG_DEBUGF("main.c", "Skipped files: %d", ScanCtx.dbg_skipped_files_count)
|
||||
LOG_DEBUGF("main.c", "Excluded files: %d", ScanCtx.dbg_excluded_files_count)
|
||||
LOG_DEBUGF("main.c", "Failed files: %d", ScanCtx.dbg_failed_files_count)
|
||||
LOG_DEBUGF("main.c", "Thumbnail store size: %d", ScanCtx.stat_tn_size)
|
||||
LOG_DEBUGF("main.c", "Index size: %d", ScanCtx.stat_index_size)
|
||||
|
||||
if (args->incremental != NULL) {
|
||||
char dst_path[PATH_MAX];
|
||||
snprintf(store_path, PATH_MAX, "%sthumbs", args->incremental);
|
||||
snprintf(dst_path, PATH_MAX, "%s_index_original.ndjson.zst", ScanCtx.index.path);
|
||||
store_t *source = store_create(store_path, STORE_SIZE_TN);
|
||||
|
||||
DIR *dir = opendir(args->incremental);
|
||||
if (dir == NULL) {
|
||||
perror("opendir");
|
||||
return;
|
||||
}
|
||||
struct dirent *de;
|
||||
while ((de = readdir(dir)) != NULL) {
|
||||
if (strncmp(de->d_name, "_index_", sizeof("_index_") - 1) == 0) {
|
||||
char file_path[PATH_MAX];
|
||||
snprintf(file_path, PATH_MAX, "%s%s", args->incremental, de->d_name);
|
||||
incremental_copy(source, ScanCtx.index.store, file_path, dst_path, ScanCtx.copy_table);
|
||||
}
|
||||
}
|
||||
closedir(dir);
|
||||
store_destroy(source);
|
||||
writer_cleanup();
|
||||
|
||||
snprintf(store_path, PATH_MAX, "%stags", args->incremental);
|
||||
snprintf(dst_path, PATH_MAX, "%stags", ScanCtx.index.path);
|
||||
store_t *source_tags = store_create(store_path, STORE_SIZE_TAG);
|
||||
store_copy(source_tags, dst_path);
|
||||
store_destroy(source_tags);
|
||||
save_incremental_index(args);
|
||||
}
|
||||
|
||||
generate_stats(&ScanCtx.index, args->treemap_threshold, ScanCtx.index.path);
|
||||
@@ -385,17 +436,19 @@ void sist2_scan(scan_args_t *args) {
|
||||
}
|
||||
|
||||
void sist2_index(index_args_t *args) {
|
||||
char file_path[PATH_MAX];
|
||||
|
||||
IndexCtx.es_url = args->es_url;
|
||||
IndexCtx.es_index = args->es_index;
|
||||
IndexCtx.batch_size = args->batch_size;
|
||||
IndexCtx.needs_es_connection = !args->print;
|
||||
|
||||
if (!args->print) {
|
||||
if (IndexCtx.needs_es_connection) {
|
||||
elastic_init(args->force_reset, args->es_mappings, args->es_settings);
|
||||
}
|
||||
|
||||
char descriptor_path[PATH_MAX];
|
||||
snprintf(descriptor_path, PATH_MAX, "%s/descriptor.json", args->index_path);
|
||||
snprintf(descriptor_path, PATH_MAX, "%sdescriptor.json", args->index_path);
|
||||
|
||||
index_descriptor_t desc = read_index_descriptor(descriptor_path);
|
||||
|
||||
@@ -411,11 +464,11 @@ void sist2_index(index_args_t *args) {
|
||||
}
|
||||
|
||||
char path_tmp[PATH_MAX];
|
||||
snprintf(path_tmp, sizeof(path_tmp), "%s/tags", args->index_path);
|
||||
snprintf(path_tmp, sizeof(path_tmp), "%stags", args->index_path);
|
||||
IndexCtx.tag_store = store_create(path_tmp, STORE_SIZE_TAG);
|
||||
IndexCtx.tags = store_read_all(IndexCtx.tag_store);
|
||||
|
||||
snprintf(path_tmp, sizeof(path_tmp), "%s/meta", args->index_path);
|
||||
snprintf(path_tmp, sizeof(path_tmp), "%smeta", args->index_path);
|
||||
IndexCtx.meta_store = store_create(path_tmp, STORE_SIZE_META);
|
||||
IndexCtx.meta = store_read_all(IndexCtx.meta_store);
|
||||
|
||||
@@ -426,32 +479,33 @@ void sist2_index(index_args_t *args) {
|
||||
f = index_json;
|
||||
}
|
||||
|
||||
void (*cleanup)();
|
||||
if (args->print) {
|
||||
cleanup = NULL;
|
||||
} else {
|
||||
cleanup = elastic_cleanup;
|
||||
}
|
||||
|
||||
IndexCtx.pool = tpool_create(args->threads, cleanup, FALSE, FALSE);
|
||||
IndexCtx.pool = tpool_create(args->threads, elastic_cleanup, FALSE, args->print == 0, 0);
|
||||
tpool_start(IndexCtx.pool);
|
||||
|
||||
struct dirent *de;
|
||||
while ((de = readdir(dir)) != NULL) {
|
||||
if (strncmp(de->d_name, "_index_", sizeof("_index_") - 1) == 0) {
|
||||
char file_path[PATH_MAX];
|
||||
snprintf(file_path, PATH_MAX, "%s/%s", args->index_path, de->d_name);
|
||||
READ_INDICES(file_path, args->index_path, {
|
||||
read_index(file_path, desc.id, desc.type, f);
|
||||
LOG_DEBUGF("main.c", "Read index file %s (%s)", file_path, desc.type);
|
||||
}, {}, !args->incremental);
|
||||
|
||||
// Only read the _delete index if we're sending data to ES
|
||||
if (!args->print) {
|
||||
snprintf(file_path, PATH_MAX, "%s_index_delete.list.zst", args->index_path);
|
||||
if (0 == access(file_path, R_OK)) {
|
||||
read_lines(file_path, (line_processor_t) {
|
||||
.data = NULL,
|
||||
.func = delete_document
|
||||
});
|
||||
LOG_DEBUGF("main.c", "Read index file %s (%s)", file_path, desc.type)
|
||||
}
|
||||
}
|
||||
|
||||
closedir(dir);
|
||||
|
||||
tpool_wait(IndexCtx.pool);
|
||||
|
||||
tpool_destroy(IndexCtx.pool);
|
||||
|
||||
if (!args->print) {
|
||||
if (IndexCtx.needs_es_connection) {
|
||||
finish_indexer(args->script, args->async_script, desc.id);
|
||||
}
|
||||
|
||||
@@ -466,7 +520,7 @@ void sist2_exec_script(exec_args_t *args) {
|
||||
LogCtx.verbose = TRUE;
|
||||
|
||||
char descriptor_path[PATH_MAX];
|
||||
snprintf(descriptor_path, PATH_MAX, "%s/descriptor.json", args->index_path);
|
||||
snprintf(descriptor_path, PATH_MAX, "%sdescriptor.json", args->index_path);
|
||||
index_descriptor_t desc = read_index_descriptor(descriptor_path);
|
||||
|
||||
IndexCtx.es_url = args->es_url;
|
||||
@@ -489,7 +543,7 @@ void sist2_web(web_args_t *args) {
|
||||
WebCtx.tag_auth_enabled = args->tag_auth_enabled;
|
||||
WebCtx.tagline = args->tagline;
|
||||
WebCtx.dev = args->dev;
|
||||
strcpy(WebCtx.lang, "en");
|
||||
strcpy(WebCtx.lang, args->lang);
|
||||
|
||||
for (int i = 0; i < args->index_count; i++) {
|
||||
char *abs_path = abspath(args->indices[i]);
|
||||
@@ -509,17 +563,38 @@ void sist2_web(web_args_t *args) {
|
||||
WebCtx.indices[i].desc = read_index_descriptor(path_tmp);
|
||||
|
||||
strcpy(WebCtx.indices[i].path, abs_path);
|
||||
printf("Loaded index: %s\n", WebCtx.indices[i].desc.name);
|
||||
LOG_INFOF("main.c", "Loaded index: [%s]", WebCtx.indices[i].desc.name)
|
||||
free(abs_path);
|
||||
}
|
||||
|
||||
serve(args->listen_address);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback to handle options such that
|
||||
*
|
||||
* Unspecified -> 0: Set to default value
|
||||
* Specified "0" -> -1: Disable the option (ex. don't generate thumbnails)
|
||||
* Negative number -> Raise error
|
||||
* Specified a valid number -> Continue as normal
|
||||
*/
|
||||
int set_to_negative_if_value_is_zero(struct argparse *self, const struct argparse_option *option) {
|
||||
int specified_value = *(int *) option->value;
|
||||
|
||||
if (specified_value == 0) {
|
||||
*((int *) option->data) = OPTION_VALUE_DISABLE;
|
||||
}
|
||||
|
||||
if (specified_value < 0) {
|
||||
fprintf(stderr, "error: option `--%s` Value must be >= 0\n", option->long_name);
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int main(int argc, const char *argv[]) {
|
||||
// sigsegv_handler = signal(SIGSEGV, sig_handler);
|
||||
// sigabrt_handler = signal(SIGABRT, sig_handler);
|
||||
sigsegv_handler = signal(SIGSEGV, sig_handler);
|
||||
sigabrt_handler = signal(SIGABRT, sig_handler);
|
||||
|
||||
setlocale(LC_ALL, "");
|
||||
|
||||
@@ -545,12 +620,21 @@ int main(int argc, const char *argv[]) {
|
||||
|
||||
OPT_GROUP("Scan options"),
|
||||
OPT_INTEGER('t', "threads", &common_threads, "Number of threads. DEFAULT=1"),
|
||||
OPT_FLOAT('q', "quality", &scan_args->quality,
|
||||
"Thumbnail quality, on a scale of 1.0 to 31.0, 1.0 being the best. DEFAULT=3"),
|
||||
OPT_INTEGER(0, "size", &scan_args->size,
|
||||
"Thumbnail size, in pixels. Use negative value to disable. DEFAULT=500"),
|
||||
OPT_INTEGER(0, "mem-throttle", &scan_args->scan_mem_limit_mib,
|
||||
"Total memory threshold in MiB for scan throttling. DEFAULT=0",
|
||||
set_to_negative_if_value_is_zero, (intptr_t) &scan_args->scan_mem_limit_mib),
|
||||
OPT_FLOAT('q', "thumbnail-quality", &scan_args->tn_quality,
|
||||
"Thumbnail quality, on a scale of 1.0 to 31.0, 1.0 being the best. DEFAULT=1",
|
||||
set_to_negative_if_value_is_zero, (intptr_t) &scan_args->tn_quality),
|
||||
OPT_INTEGER(0, "thumbnail-size", &scan_args->tn_size,
|
||||
"Thumbnail size, in pixels. DEFAULT=500",
|
||||
set_to_negative_if_value_is_zero, (intptr_t) &scan_args->tn_size),
|
||||
OPT_INTEGER(0, "thumbnail-count", &scan_args->tn_count,
|
||||
"Number of thumbnails to generate. Set a value > 1 to create video previews, set to 0 to disable thumbnails. DEFAULT=1",
|
||||
set_to_negative_if_value_is_zero, (intptr_t) &scan_args->tn_count),
|
||||
OPT_INTEGER(0, "content-size", &scan_args->content_size,
|
||||
"Number of bytes to be extracted from text documents. Use negative value to disable. DEFAULT=32768"),
|
||||
"Number of bytes to be extracted from text documents. Set to 0 to disable. DEFAULT=32768",
|
||||
set_to_negative_if_value_is_zero, (intptr_t) &scan_args->content_size),
|
||||
OPT_STRING(0, "incremental", &scan_args->incremental,
|
||||
"Reuse an existing index and only scan modified files."),
|
||||
OPT_STRING('o', "output", &scan_args->output, "Output directory. DEFAULT=index.sist2/"),
|
||||
@@ -564,25 +648,33 @@ int main(int argc, const char *argv[]) {
|
||||
OPT_STRING(0, "archive-passphrase", &scan_args->archive_passphrase,
|
||||
"Passphrase for encrypted archive files"),
|
||||
|
||||
OPT_STRING(0, "ocr", &scan_args->tesseract_lang, "Tesseract language (use tesseract --list-langs to see "
|
||||
OPT_STRING(0, "ocr-lang", &scan_args->tesseract_lang,
|
||||
"Tesseract language (use 'tesseract --list-langs' to see "
|
||||
"which are installed on your machine)"),
|
||||
OPT_BOOLEAN(0, "ocr-images", &scan_args->ocr_images, "Enable OCR'ing of image files."),
|
||||
OPT_BOOLEAN(0, "ocr-ebooks", &scan_args->ocr_ebooks, "Enable OCR'ing of ebook files."),
|
||||
OPT_STRING('e', "exclude", &scan_args->exclude_regex, "Files that match this regex will not be scanned"),
|
||||
OPT_BOOLEAN(0, "fast", &scan_args->fast, "Only index file names & mime type"),
|
||||
OPT_STRING(0, "treemap-threshold", &scan_args->treemap_threshold_str, "Relative size threshold for treemap "
|
||||
"(see USAGE.md). DEFAULT: 0.0005"),
|
||||
OPT_INTEGER(0, "mem-buffer", &scan_args->max_memory_buffer,
|
||||
"Maximum memory buffer size per thread in MB for files inside archives "
|
||||
OPT_INTEGER(0, "mem-buffer", &scan_args->max_memory_buffer_mib,
|
||||
"Maximum memory buffer size per thread in MiB for files inside archives "
|
||||
"(see USAGE.md). DEFAULT: 2000"),
|
||||
OPT_BOOLEAN(0, "read-subtitles", &scan_args->read_subtitles, "Read subtitles from media files."),
|
||||
OPT_BOOLEAN(0, "fast-epub", &scan_args->fast_epub,
|
||||
"Faster but less accurate EPUB parsing (no thumbnails, metadata)"),
|
||||
OPT_BOOLEAN(0, "checksums", &scan_args->calculate_checksums, "Calculate file checksums when scanning."),
|
||||
OPT_STRING(0, "list-file", &scan_args->list_path, "Specify a list of newline-delimited paths to be scanned"
|
||||
" instead of normal directory traversal. Use '-' to read"
|
||||
" from stdin."),
|
||||
|
||||
OPT_GROUP("Index options"),
|
||||
OPT_INTEGER('t', "threads", &common_threads, "Number of threads. DEFAULT=1"),
|
||||
OPT_STRING(0, "es-url", &common_es_url, "Elasticsearch url with port. DEFAULT=http://localhost:9200"),
|
||||
OPT_STRING(0, "es-index", &common_es_index, "Elasticsearch index name. DEFAULT=sist2"),
|
||||
OPT_BOOLEAN('p', "print", &index_args->print, "Just print JSON documents to stdout."),
|
||||
OPT_BOOLEAN(0, "incremental-index", &index_args->incremental,
|
||||
"Conduct incremental indexing. Assumes that the old index is already ingested in Elasticsearch."),
|
||||
OPT_STRING(0, "script-file", &common_script_path, "Path to user script."),
|
||||
OPT_STRING(0, "mappings-file", &index_args->es_mappings_path, "Path to Elasticsearch mappings."),
|
||||
OPT_STRING(0, "settings-file", &index_args->es_settings_path, "Path to Elasticsearch settings."),
|
||||
@@ -599,6 +691,7 @@ int main(int argc, const char *argv[]) {
|
||||
OPT_STRING(0, "tag-auth", &web_args->tag_credentials, "Basic auth in user:password format for tagging"),
|
||||
OPT_STRING(0, "tagline", &web_args->tagline, "Tagline in navbar"),
|
||||
OPT_BOOLEAN(0, "dev", &web_args->dev, "Serve html & js files from disk (for development)"),
|
||||
OPT_STRING(0, "lang", &web_args->lang, "Default UI language. Can be changed by the user"),
|
||||
|
||||
OPT_GROUP("Exec-script options"),
|
||||
OPT_STRING(0, "es-url", &common_es_url, "Elasticsearch url. DEFAULT=http://localhost:9200"),
|
||||
|
||||
@@ -79,15 +79,27 @@ void parse(void *arg) {
|
||||
|
||||
int inc_ts = incremental_get(ScanCtx.original_table, doc->path_md5);
|
||||
if (inc_ts != 0 && inc_ts == job->vfile.info.st_mtim.tv_sec) {
|
||||
incremental_mark_file_for_copy(ScanCtx.copy_table, doc->path_md5);
|
||||
pthread_mutex_lock(&ScanCtx.copy_table_mu);
|
||||
incremental_mark_file(ScanCtx.copy_table, doc->path_md5);
|
||||
pthread_mutex_unlock(&ScanCtx.copy_table_mu);
|
||||
|
||||
pthread_mutex_lock(&ScanCtx.dbg_file_counts_mu);
|
||||
ScanCtx.dbg_skipped_files_count += 1;
|
||||
pthread_mutex_unlock(&ScanCtx.dbg_file_counts_mu);
|
||||
|
||||
CLOSE_FILE(job->vfile)
|
||||
free(doc->filepath);
|
||||
free(doc);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (ScanCtx.new_table != NULL) {
|
||||
pthread_mutex_lock(&ScanCtx.copy_table_mu);
|
||||
incremental_mark_file(ScanCtx.new_table, doc->path_md5);
|
||||
pthread_mutex_unlock(&ScanCtx.copy_table_mu);
|
||||
}
|
||||
|
||||
char *buf[MAGIC_BUF_SIZE];
|
||||
|
||||
if (LogCtx.very_verbose) {
|
||||
@@ -121,11 +133,14 @@ void parse(void *arg) {
|
||||
LOG_ERRORF(job->filepath, "(virtual) read(): [%d] %s", bytes_read, archive_error_string(job->vfile.arc))
|
||||
}
|
||||
|
||||
CLOSE_FILE(job->vfile)
|
||||
|
||||
pthread_mutex_lock(&ScanCtx.dbg_file_counts_mu);
|
||||
ScanCtx.dbg_failed_files_count += 1;
|
||||
pthread_mutex_unlock(&ScanCtx.dbg_file_counts_mu);
|
||||
|
||||
CLOSE_FILE(job->vfile)
|
||||
free(doc->filepath);
|
||||
free(doc);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
#ifndef SIST_H
#define SIST_H

#define _GNU_SOURCE

#ifndef FALSE
#define FALSE (0)
#define BOOL int
@@ -51,13 +53,15 @@
#include <ctype.h>
#include "git_hash.h"

#define VERSION "2.11.3"
#define VERSION "2.11.7"
static const char *const Version = VERSION;

#ifndef SIST_PLATFORM
#define SIST_PLATFORM unknown
#endif

#define EXPECTED_MONGOOSE_VERSION "7.3"

#define Q(x) #x
#define QUOTE(x) Q(x)
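The two-level Q()/QUOTE() macros added above are the usual C idiom for turning a macro's value into a string literal: a single level of # would stringify the macro name instead of what it expands to. A small standalone sketch of the idiom; the SIST_PLATFORM definition here is local to the example, not how the build actually sets it:

```c
#include <stdio.h>

#define Q(x) #x
#define QUOTE(x) Q(x)          /* expands x first, then stringifies the result */

#define SIST_PLATFORM x64_linux

int main(void) {
    /* Q(SIST_PLATFORM)     -> "SIST_PLATFORM"  (the name, not useful here)
     * QUOTE(SIST_PLATFORM) -> "x64_linux"      (the value, what serve.c reports as "platform") */
    printf("%s\n", QUOTE(SIST_PLATFORM));
    return 0;
}
```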
10  src/stats.c
@@ -96,16 +96,8 @@ void fill_tables(cJSON *document, UNUSED(const char index_id[MD5_STR_LENGTH])) {
}

void read_index_into_tables(index_t *index) {
    DIR *dir = opendir(index->path);
    struct dirent *de;
    while ((de = readdir(dir)) != NULL) {
        if (strncmp(de->d_name, "_index_", sizeof("_index_") - 1) == 0) {
            char file_path[PATH_MAX];
            snprintf(file_path, PATH_MAX, "%s%s", index->path, de->d_name);
            read_index(file_path, index->desc.id, index->desc.type, fill_tables);
        }
    }
    closedir(dir);
    READ_INDICES(file_path, index->path, read_index(file_path, index->desc.id, index->desc.type, fill_tables), {}, 1);
}

static size_t rfind(const char *str, int c) {
70  src/tpool.c
@@ -28,6 +28,9 @@ typedef struct tpool {
    int work_cnt;
    int done_cnt;
    int busy_cnt;
    int throttle_stuck_cnt;
    size_t mem_limit;
    size_t page_size;

    int free_arg;
    int stop;
@@ -114,13 +117,44 @@ int tpool_add_work(tpool_t *pool, thread_func_t func, void *arg) {
    return 1;
}

/**
 * see: https://github.com/htop-dev/htop/blob/f782f821f7f8081cb43bbad1c37f32830a260a81/linux/LinuxProcessList.c
 */
__always_inline
static size_t _get_total_mem(tpool_t* pool) {
    FILE* statmfile = fopen("/proc/self/statm", "r");
    if (!statmfile)
        return 0;

    long int dummy, dummy2, dummy3, dummy4, dummy5, dummy6;
    long int m_resident;

    int r = fscanf(statmfile, "%ld %ld %ld %ld %ld %ld %ld",
                   &dummy, /* m_virt */
                   &m_resident,
                   &dummy2, /* m_share */
                   &dummy3, /* m_trs */
                   &dummy4, /* unused since Linux 2.6; always 0 */
                   &dummy5, /* m_drs */
                   &dummy6); /* unused since Linux 2.6; always 0 */
    fclose(statmfile);

    if (r == 7) {
        return m_resident * pool->page_size;
    } else {
        return 0;
    }
}

/**
 * Thread worker function
 */
static void *tpool_worker(void *arg) {
    tpool_t *pool = arg;
    int stuck_notified = 0;
    int throttle_ms = 0;

    while (1) {
    while (TRUE) {
        pthread_mutex_lock(&pool->work_mutex);
        if (pool->stop) {
            break;
@@ -138,10 +172,35 @@ static void *tpool_worker(void *arg) {
        pthread_mutex_unlock(&(pool->work_mutex));

        if (work != NULL) {
            stuck_notified = 0;
            throttle_ms = 0;
            while (!pool->stop && pool->mem_limit > 0 && _get_total_mem(pool) >= pool->mem_limit) {
                if (!stuck_notified && throttle_ms >= 90000) {
                    // notify the pool that this thread is stuck.
                    pthread_mutex_lock(&(pool->work_mutex));
                    pool->throttle_stuck_cnt += 1;
                    if (pool->throttle_stuck_cnt == pool->thread_cnt) {
                        LOG_ERROR("tpool.c", "Throttle memory limit too low, cannot proceed!");
                        pool->stop = TRUE;
                    }
                    pthread_mutex_unlock(&(pool->work_mutex));
                    stuck_notified = 1;
                }
                usleep(10000);
                throttle_ms += 10;
            }

            if (pool->stop) {
                break;
            }

            // we are not stuck anymore. cancel our notification.
            if (stuck_notified) {
                pthread_mutex_lock(&(pool->work_mutex));
                pool->throttle_stuck_cnt -= 1;
                pthread_mutex_unlock(&(pool->work_mutex));
            }

            work->func(work->arg);
            if (pool->free_arg) {
                free(work->arg);
@@ -177,7 +236,7 @@ static void *tpool_worker(void *arg) {
}

void tpool_wait(tpool_t *pool) {
    LOG_INFO("tpool.c", "Waiting for worker threads to finish")
    LOG_DEBUG("tpool.c", "Waiting for worker threads to finish")
    pthread_mutex_lock(&(pool->work_mutex));
    while (TRUE) {
        if (pool->done_cnt < pool->work_cnt) {
@@ -191,7 +250,9 @@ void tpool_wait(tpool_t *pool) {
            }
        }
    }
    if (pool->print_progress) {
        progress_bar_print(1.0, ScanCtx.stat_tn_size, ScanCtx.stat_index_size);
    }
    pthread_mutex_unlock(&(pool->work_mutex));

    LOG_INFO("tpool.c", "Worker threads finished")
@@ -241,18 +302,21 @@ void tpool_destroy(tpool_t *pool) {
 * Create a thread pool
 * @param thread_cnt Worker threads count
 */
tpool_t *tpool_create(int thread_cnt, void cleanup_func(), int free_arg, int print_progress) {
tpool_t *tpool_create(int thread_cnt, void cleanup_func(), int free_arg, int print_progress, size_t mem_limit) {

    tpool_t *pool = malloc(sizeof(tpool_t));
    pool->thread_cnt = thread_cnt;
    pool->work_cnt = 0;
    pool->done_cnt = 0;
    pool->busy_cnt = 0;
    pool->throttle_stuck_cnt = 0;
    pool->mem_limit = mem_limit;
    pool->stop = FALSE;
    pool->free_arg = free_arg;
    pool->cleanup_func = cleanup_func;
    pool->threads = calloc(sizeof(pthread_t), thread_cnt);
    pool->print_progress = print_progress;
    pool->page_size = getpagesize();

    pthread_mutex_init(&(pool->work_mutex), NULL);
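_get_total_mem() above derives the process RSS by reading the second field of /proc/self/statm (resident pages) and multiplying by the page size, the same trick the referenced htop code uses. A tiny self-contained sketch of that computation, with no sist2 types involved:

```c
#include <stdio.h>
#include <unistd.h>

/* Returns the current resident set size of this process in bytes,
 * or 0 if /proc/self/statm cannot be read (e.g. non-Linux systems). */
static size_t resident_bytes(void) {
    FILE *f = fopen("/proc/self/statm", "r");
    if (f == NULL) {
        return 0;
    }

    long pages_total, pages_resident;
    int r = fscanf(f, "%ld %ld", &pages_total, &pages_resident);
    fclose(f);

    if (r != 2) {
        return 0;
    }
    /* statm reports sizes in pages; convert to bytes. */
    return (size_t) pages_resident * (size_t) sysconf(_SC_PAGESIZE);
}

int main(void) {
    printf("RSS: %zu bytes\n", resident_bytes());
    return 0;
}
```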
@@ -8,7 +8,7 @@ typedef struct tpool tpool_t;

typedef void (*thread_func_t)(void *arg);

tpool_t *tpool_create(int num, void (*cleanup_func)(), int free_arg, int print_progress);
tpool_t *tpool_create(int num, void (*cleanup_func)(), int free_arg, int print_progress, size_t mem_limit);
void tpool_start(tpool_t *pool);
void tpool_destroy(tpool_t *pool);
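With the new signature, callers pass a memory limit in bytes as the last argument; the worker loop above only engages the throttle when mem_limit > 0, so passing 0 keeps the old unthrottled behaviour. A hedged usage sketch — the cleanup callback, the 2 GiB figure and the example() wrapper are illustrative, not taken from sist2, and the snippet links against the updated tpool.c rather than standing alone:

```c
#include <stddef.h>

/* Prototypes as declared in the updated tpool.h */
typedef struct tpool tpool_t;
tpool_t *tpool_create(int num, void (*cleanup_func)(), int free_arg, int print_progress, size_t mem_limit);
void tpool_start(tpool_t *pool);
void tpool_destroy(tpool_t *pool);

static void no_cleanup(void) {
    /* nothing to tear down in this sketch */
}

void example(void) {
    /* 4 worker threads; throttle whenever the process RSS reaches ~2 GiB.
     * Passing 0 as mem_limit would disable throttling entirely. */
    tpool_t *pool = tpool_create(4, no_cleanup, 1 /* free_arg */, 1 /* print_progress */,
                                 (size_t) 2 * 1024 * 1024 * 1024);
    tpool_start(pool);
    /* ... tpool_add_work() / tpool_wait() ... */
    tpool_destroy(pool);
}
```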
16  src/util.c
@@ -84,11 +84,13 @@ char *expandpath(const char *path) {
    return expanded;
}

int PrintingProgressBar = 0;

void progress_bar_print(double percentage, size_t tn_size, size_t index_size) {

    static int last_val = -1;
    int val = (int) (percentage * 100);
    if (last_val == val || val > 100 || index_size < 1024) {
    if (last_val == val || val > 100) {
        return;
    }
    last_val = val;
@@ -114,13 +116,21 @@ void progress_bar_print(double percentage, size_t tn_size, size_t index_size) {
        index_unit = 'M';
    }

    printf(
    if (tn_size == 0 && index_size == 0) {
        fprintf(stderr,
                "\r%3d%%[%.*s>%*s]",
                val, lpad, PBSTR, rpad, ""
        );
    } else {
        fprintf(stderr,
                "\r%3d%%[%.*s>%*s] TN:%3d%c IDX:%3d%c",
                val, lpad, PBSTR, rpad, "",
                (int) tn_size, tn_unit,
                (int) index_size, index_unit
        );
        fflush(stdout);
    }

    PrintingProgressBar = TRUE;
}

GHashTable *incremental_get_table() {

@@ -19,6 +19,8 @@ char *expandpath(const char *path);

dyn_buffer_t url_escape(char *str);

extern int PrintingProgressBar;

void progress_bar_print(double percentage, size_t tn_size, size_t index_size);

GHashTable *incremental_get_table();
@@ -131,8 +133,12 @@ static int incremental_get_str(GHashTable *table, const char *path_md5) {
    }
}

/**
 * Marks a file by adding it to a table.
 * !!Not thread safe.
 */
__always_inline
static int incremental_mark_file_for_copy(GHashTable *table, const unsigned char path_md5[MD5_DIGEST_LENGTH]) {
static int incremental_mark_file(GHashTable *table, const unsigned char path_md5[MD5_DIGEST_LENGTH]) {
    char *ptr = malloc(MD5_STR_LENGTH);
    buf2hex(path_md5, MD5_DIGEST_LENGTH, ptr);
    return g_hash_table_insert(table, ptr, GINT_TO_POINTER(1));
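The progress bar above redraws in place by printing a carriage return and clipping/padding a fixed-width bar string; the diff also moves the output to stderr, presumably so that anything piped from stdout (such as `--print` JSON) stays clean. A minimal sketch of the same redraw trick, with PBSTR/PBWIDTH defined locally just for this example:

```c
#include <stdio.h>
#include <unistd.h>

#define PBSTR "=================================================="
#define PBWIDTH 50

/* Draw a [====>   ] style bar on stderr; '\r' rewinds to the start of the
 * line so each call overwrites the previous one instead of scrolling. */
static void draw_progress(double percentage) {
    int val = (int) (percentage * 100);
    int lpad = (int) (percentage * PBWIDTH);   /* chars of PBSTR to show */
    int rpad = PBWIDTH - lpad;                 /* spaces to pad on the right */
    fprintf(stderr, "\r%3d%%[%.*s>%*s]", val, lpad, PBSTR, rpad, "");
}

int main(void) {
    for (int i = 0; i <= 100; i++) {
        draw_progress(i / 100.0);
        usleep(20000);
    }
    fprintf(stderr, "\n");
    return 0;
}
```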
@@ -8,12 +8,16 @@

#include <src/ctx.h>

#define HTTP_SERVER_HEADER "Server: sist2/" VERSION "\r\n"
#define HTTP_TEXT_TYPE_HEADER "Content-Type: text/plain;charset=utf-8\r\n"
#define HTTP_REPLY_NOT_FOUND mg_http_reply(nc, 404, HTTP_SERVER_HEADER HTTP_TEXT_TYPE_HEADER, "Not found");

static void send_response_line(struct mg_connection *nc, int status_code, size_t length, char *extra_headers) {
    mg_printf(
            nc,
            "HTTP/1.1 %d %s\r\n"
            "Server: sist2/" VERSION "\r\n"
            HTTP_SERVER_HEADER
            "Content-Length: %d\r\n"
            "%s\r\n\r\n",
            status_code, "OK",
@@ -60,7 +64,7 @@ void search_index(struct mg_connection *nc, struct mg_http_message *hm) {
void stats_files(struct mg_connection *nc, struct mg_http_message *hm) {

    if (hm->uri.len != MD5_STR_LENGTH + 4) {
        mg_http_reply(nc, 404, "", "");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -70,7 +74,7 @@ void stats_files(struct mg_connection *nc, struct mg_http_message *hm) {

    index_t *index = get_index_by_id(arg_md5);
    if (index == NULL) {
        mg_http_reply(nc, 404, "", "");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -138,11 +142,17 @@ void style_vendor(struct mg_connection *nc, struct mg_http_message *hm) {

void thumbnail(struct mg_connection *nc, struct mg_http_message *hm) {

    int parse_tn_num = FALSE;

    if (hm->uri.len != 68) {

        if (hm->uri.len != 68 + 4) {
            LOG_DEBUGF("serve.c", "Invalid thumbnail path: %.*s", (int) hm->uri.len, hm->uri.ptr)
            mg_http_reply(nc, 404, "", "Not found");
            HTTP_REPLY_NOT_FOUND
            return;
        }
        parse_tn_num = TRUE;
    }

    char arg_file_md5[MD5_STR_LENGTH];
    char arg_index[MD5_STR_LENGTH];
@@ -158,12 +168,25 @@ void thumbnail(struct mg_connection *nc, struct mg_http_message *hm) {
    store_t *store = get_store(arg_index);
    if (store == NULL) {
        LOG_DEBUGF("serve.c", "Could not get store for index: %s", arg_index)
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

    char *data;
    size_t data_len = 0;
    char *data = store_read(store, (char *) md5_buf, sizeof(md5_buf), &data_len);

    if (parse_tn_num) {
        int tn_num = atoi(hm->uri.ptr + 68);

        char tn_key[sizeof(md5_buf) + sizeof(int)];
        memcpy(tn_key, md5_buf, sizeof(md5_buf));
        memcpy(tn_key + sizeof(md5_buf), &tn_num, sizeof(tn_num));

        data = store_read(store, (char *) tn_key, sizeof(tn_key), &data_len);
    } else {
        data = store_read(store, (char *) md5_buf, sizeof(md5_buf), &data_len);
    }

    if (data_len != 0) {
        send_response_line(
                nc, 200, data_len,
@@ -173,7 +196,7 @@ void thumbnail(struct mg_connection *nc, struct mg_http_message *hm) {
        mg_send(nc, data, data_len);
        free(data);
    } else {
        mg_http_reply(nc, 404, "Content-Type: text/plain;charset=utf-8\r\n", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }
}
@@ -182,7 +205,7 @@ void search(struct mg_connection *nc, struct mg_http_message *hm) {

    if (hm->body.len == 0) {
        LOG_DEBUG("serve.c", "Client sent empty body, ignoring request")
        mg_http_reply(nc, 500, "", "Invalid request");
        mg_http_reply(nc, 500, HTTP_SERVER_HEADER HTTP_TEXT_TYPE_HEADER, "Invalid request");
        return;
    }

@@ -226,6 +249,11 @@ void serve_file_from_url(cJSON *json, index_t *idx, struct mg_connection *nc) {

void serve_file_from_disk(cJSON *json, index_t *idx, struct mg_connection *nc, struct mg_http_message *hm) {

    if (strcmp(MG_VERSION, EXPECTED_MONGOOSE_VERSION) != 0) {
        LOG_WARNING("serve.c", "sist2 was not linked with latest mongoose version, "
                               "serving file from disk might not work as expected.")
    }

    const char *path = cJSON_GetObjectItem(json, "path")->valuestring;
    const char *name = cJSON_GetObjectItem(json, "name")->valuestring;
    const char *ext = cJSON_GetObjectItem(json, "extension")->valuestring;
@@ -246,21 +274,41 @@ void serve_file_from_disk(cJSON *json, index_t *idx, struct mg_connection *nc, s

    char disposition[8192];
    snprintf(disposition, sizeof(disposition),
             "Content-Disposition: inline; filename=\"%s%s%s\"\r\nAccept-Ranges: bytes\r\n",
             HTTP_SERVER_HEADER "Content-Disposition: inline; filename=\"%s%s%s\"\r\nAccept-Ranges: bytes\r\n",
             name, strlen(ext) == 0 ? "" : ".", ext);

    mg_http_serve_file(nc, hm, full_path, mime, disposition);
}

void cache_es_version() {
    static int is_cached = FALSE;

    if (is_cached == TRUE) {
        return;
    }

    es_version_t *es_version = elastic_get_version(WebCtx.es_url);
    if (es_version != NULL) {
        WebCtx.es_version = es_version;
        is_cached = TRUE;
    }
}

void index_info(struct mg_connection *nc) {

    cache_es_version();

    cJSON *json = cJSON_CreateObject();
    cJSON *arr = cJSON_AddArrayToObject(json, "indices");

    cJSON_AddStringToObject(json, "mongooseVersion", MG_VERSION);
    cJSON_AddStringToObject(json, "esIndex", WebCtx.es_index);
    cJSON_AddStringToObject(json, "version", Version);
    cJSON_AddStringToObject(json, "esVersion", format_es_version(WebCtx.es_version));
    cJSON_AddBoolToObject(json, "esVersionSupported", IS_SUPPORTED_ES_VERSION(WebCtx.es_version));
    cJSON_AddBoolToObject(json, "esVersionLegacy", USE_LEGACY_ES_SETTINGS(WebCtx.es_version));
    cJSON_AddStringToObject(json, "platform", QUOTE(SIST_PLATFORM));
    cJSON_AddStringToObject(json, "sist2Hash", Sist2CommitHash);
    cJSON_AddStringToObject(json, "libscanHash", LibScanCommitHash);
    cJSON_AddStringToObject(json, "lang", WebCtx.lang);
    cJSON_AddBoolToObject(json, "dev", WebCtx.dev);
#ifdef SIST_DEBUG
@@ -295,7 +343,7 @@ void document_info(struct mg_connection *nc, struct mg_http_message *hm) {

    if (hm->uri.len != MD5_STR_LENGTH + 2) {
        LOG_DEBUGF("serve.c", "Invalid document_info path: %.*s", (int) hm->uri.len, hm->uri.ptr)
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -309,14 +357,14 @@ void document_info(struct mg_connection *nc, struct mg_http_message *hm) {
    cJSON *index_id = cJSON_GetObjectItem(source, "index");
    if (index_id == NULL) {
        cJSON_Delete(doc);
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

    index_t *idx = get_index_by_id(index_id->valuestring);
    if (idx == NULL) {
        cJSON_Delete(doc);
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -331,7 +379,7 @@ void file(struct mg_connection *nc, struct mg_http_message *hm) {

    if (hm->uri.len != MD5_STR_LENGTH + 2) {
        LOG_DEBUGF("serve.c", "Invalid file path: %.*s", (int) hm->uri.len, hm->uri.ptr)
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -350,7 +398,7 @@ void file(struct mg_connection *nc, struct mg_http_message *hm) {
    index_id = cJSON_GetObjectItem(source, "index");
    if (index_id == NULL) {
        cJSON_Delete(doc);
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }
    cJSON *parent = cJSON_GetObjectItem(source, "parent");
@@ -364,7 +412,7 @@ void file(struct mg_connection *nc, struct mg_http_message *hm) {

    if (idx == NULL) {
        cJSON_Delete(doc);
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -433,7 +481,7 @@ tag_req_t *parse_tag_request(cJSON *json) {
void tag(struct mg_connection *nc, struct mg_http_message *hm) {
    if (hm->uri.len != MD5_STR_LENGTH + 4) {
        LOG_DEBUGF("serve.c", "Invalid tag path: %.*s", (int) hm->uri.len, hm->uri.ptr)
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -443,14 +491,14 @@ void tag(struct mg_connection *nc, struct mg_http_message *hm) {

    if (hm->body.len < 2 || hm->method.len != 4 || memcmp(&hm->method, "POST", 4) == 0) {
        LOG_DEBUG("serve.c", "Invalid tag request")
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

    store_t *store = get_tag_store(arg_index);
    if (store == NULL) {
        LOG_DEBUGF("serve.c", "Could not get tag store for index: %s", arg_index)
        mg_http_reply(nc, 404, "", "Not found");
        HTTP_REPLY_NOT_FOUND
        return;
    }

@@ -596,7 +644,7 @@ static void ev_router(struct mg_connection *nc, int ev, void *ev_data, UNUSED(vo
    } else if (mg_http_match_uri(hm, "/d/*")) {
        document_info(nc, hm);
    } else {
        mg_http_reply(nc, 404, "", "Page not found");
        HTTP_REPLY_NOT_FOUND
    }

} else if (ev == MG_EV_POLL) {
@@ -626,7 +674,8 @@ static void ev_router(struct mg_connection *nc, int ev, void *ev_data, UNUSED(vo
        free(tmp);
    }

    mg_http_reply(nc, 500, "", "");
    mg_http_reply(nc, 500, HTTP_SERVER_HEADER HTTP_TEXT_TYPE_HEADER,
                  "Elasticsearch error, see server logs.");
}

free_response(r);
@@ -640,7 +689,7 @@ static void ev_router(struct mg_connection *nc, int ev, void *ev_data, UNUSED(vo

void serve(const char *listen_address) {

    printf("Starting web server @ http://%s\n", listen_address);
    LOG_INFOF("serve.c", "Starting web server @ http://%s", listen_address)

    struct mg_mgr mgr;
    mg_mgr_init(&mgr);
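In the thumbnail handler above, a numbered thumbnail is looked up in the store with a composite key: the document's raw MD5 digest followed by the raw bytes of the thumbnail number. A small self-contained sketch of building such a key — the buffer sizes follow the diff, but the helper and print statement are purely illustrative:

```c
#include <stdio.h>
#include <string.h>

#define MD5_DIGEST_LENGTH 16

/* Build a composite key: 16 raw MD5 bytes followed by the int thumbnail number.
 * The result is exactly MD5_DIGEST_LENGTH + sizeof(int) bytes, as in the handler above. */
static size_t make_tn_key(const unsigned char md5[MD5_DIGEST_LENGTH], int tn_num,
                          char out[MD5_DIGEST_LENGTH + sizeof(int)]) {
    memcpy(out, md5, MD5_DIGEST_LENGTH);
    memcpy(out + MD5_DIGEST_LENGTH, &tn_num, sizeof(tn_num));
    return MD5_DIGEST_LENGTH + sizeof(tn_num);
}

int main(void) {
    unsigned char md5[MD5_DIGEST_LENGTH] = {0};
    char key[MD5_DIGEST_LENGTH + sizeof(int)];

    size_t key_len = make_tn_key(md5, 3, key);
    printf("key length: %zu bytes\n", key_len);
    return 0;
}
```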
10  src/web/static_generated.c (vendored)
File diff suppressed because one or more lines are too long
@@ -39,7 +39,7 @@ def sist2_index(files, *args):
    return iter(sist2_index_to_dict("test_i"))


def sist2_incremental_index(files, func=None, *args):
def sist2_incremental_index(files, func=None, incremental_index=False, *args):
    path = copy_files(files)

    if func:
@@ -47,11 +47,13 @@ def sist2_incremental_index(files, func=None, *args):

    shutil.rmtree("test_i_inc", ignore_errors=True)
    sist2("scan", path, "-o", "test_i_inc", "--incremental", "test_i", *args)
    return iter(sist2_index_to_dict("test_i_inc"))
    return iter(sist2_index_to_dict("test_i_inc", incremental_index))


def sist2_index_to_dict(index):
    res = sist2("index", "--print", index)
def sist2_index_to_dict(index, incremental_index=False):
    args = ["--incremental-index"] if incremental_index else []

    res = sist2("index", "--print", "--very-verbose", *args, index)

    for line in res.splitlines():
        if line:
@@ -75,6 +77,7 @@ class ScanTest(unittest.TestCase):

        file_count = sum(1 for _ in sist2_index(TEST_FILES))
        self.assertEqual(sum(1 for _ in sist2_incremental_index(TEST_FILES, remove_files)), file_count - 2)
        self.assertEqual(sum(1 for _ in sist2_incremental_index(TEST_FILES, add_files, incremental_index=True)), 3)
        self.assertEqual(sum(1 for _ in sist2_incremental_index(TEST_FILES, add_files)), file_count + 3)
2  third-party/argparse (vendored)
Submodule third-party/argparse updated: ffd9c23427...225141eb3d

1  third-party/libscan (vendored)
Submodule third-party/libscan deleted from 3787475ecb
12  third-party/libscan/.gitignore (vendored, new file)
@@ -0,0 +1,12 @@
.idea/
cmake_install.cmake
Makefile
libscan.a
libscan.so
*.cbp
CMakeFiles
CMakeCache.txt
scan_test
third-party/ext_*
libscan-test-files
scan_*_test
249
third-party/libscan/CMakeLists.txt
vendored
Normal file
249
third-party/libscan/CMakeLists.txt
vendored
Normal file
@@ -0,0 +1,249 @@
|
||||
cmake_minimum_required(VERSION 3.15)
|
||||
|
||||
project(scan)
|
||||
set(CMAKE_C_STANDARD 11)
|
||||
|
||||
option(BUILD_TESTS "Build tests" on)
|
||||
|
||||
add_subdirectory(third-party/antiword)
|
||||
if (SIST_DEBUG)
|
||||
add_compile_definitions(
|
||||
antiword
|
||||
DEBUG
|
||||
)
|
||||
target_compile_options(
|
||||
antiword
|
||||
PRIVATE
|
||||
-g
|
||||
-fstack-protector
|
||||
-fno-omit-frame-pointer
|
||||
-fsanitize=address
|
||||
-fno-inline
|
||||
)
|
||||
else()
|
||||
add_compile_definitions(
|
||||
antiword
|
||||
NDEBUG
|
||||
)
|
||||
endif()
|
||||
|
||||
add_library(
|
||||
scan
|
||||
libscan/util.c libscan/util.h
|
||||
libscan/scan.h
|
||||
libscan/macros.h
|
||||
|
||||
libscan/text/text.c libscan/text/text.h
|
||||
libscan/arc/arc.c libscan/arc/arc.h
|
||||
libscan/ebook/ebook.c libscan/ebook/ebook.h
|
||||
libscan/comic/comic.c libscan/comic/comic.h
|
||||
libscan/ooxml/ooxml.c libscan/ooxml/ooxml.h
|
||||
libscan/media/media.c libscan/media/media.h
|
||||
libscan/font/font.c libscan/font/font.h
|
||||
libscan/msdoc/msdoc.c libscan/msdoc/msdoc.h
|
||||
libscan/json/json.c libscan/json/json.h
|
||||
libscan/wpd/wpd.c libscan/wpd/wpd.h libscan/wpd/libwpd_c_api.h libscan/wpd/libwpd_c_api.cpp
|
||||
|
||||
third-party/utf8.h
|
||||
libscan/mobi/scan_mobi.c libscan/mobi/scan_mobi.h libscan/raw/raw.c libscan/raw/raw.h)
|
||||
set_target_properties(scan PROPERTIES LINKER_LANGUAGE C)
|
||||
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES .a .lib .so)
|
||||
|
||||
find_package(cJSON CONFIG REQUIRED)
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(BZip2 REQUIRED)
|
||||
find_package(lz4 REQUIRED)
|
||||
|
||||
find_package(Threads REQUIRED)
|
||||
find_package(Tesseract CONFIG REQUIRED)
|
||||
find_package(OpenJPEG CONFIG REQUIRED)
|
||||
find_package(JPEG REQUIRED)
|
||||
find_package(LibXml2 REQUIRED)
|
||||
find_package(LibLZMA REQUIRED)
|
||||
find_package(ZLIB REQUIRED)
|
||||
find_package(unofficial-pcre CONFIG REQUIRED)
|
||||
|
||||
|
||||
find_library(JBIG2DEC_LIB NAMES jbig2decd jbig2dec)
|
||||
find_library(HARFBUZZ_LIB NAMES harfbuzz harfbuzzd)
|
||||
find_library(FREETYPE_LIB NAMES freetype freetyped)
|
||||
find_package(unofficial-brotli CONFIG REQUIRED)
|
||||
find_library(LZO2_LIB NAMES lzo2)
|
||||
|
||||
find_library(RAW_LIB NAMES libraw.a)
|
||||
find_library(MUPDF_LIB NAMES liblibmupdf.a)
|
||||
find_library(CMS_LIB NAMES lcms2)
|
||||
find_library(JAS_LIB NAMES jasper)
|
||||
find_library(GUMBO_LIB NAMES gumbo)
|
||||
find_library(GOMP_LIB NAMES libgomp.a gomp PATHS /usr/lib/gcc/x86_64-linux-gnu/5/ /usr/lib/gcc/x86_64-linux-gnu/9/ /usr/lib/gcc/x86_64-linux-gnu/10/ /usr/lib/gcc/aarch64-linux-gnu/7/ /usr/lib/gcc/aarch64-linux-gnu/9/ /usr/lib/gcc/x86_64-linux-gnu/7/)
|
||||
|
||||
|
||||
target_compile_options(
|
||||
scan
|
||||
PRIVATE
|
||||
-g
|
||||
)
|
||||
|
||||
include(ExternalProject)
|
||||
find_program(MAKE_EXE NAMES gmake nmake make)
|
||||
ExternalProject_Add(
|
||||
libmobi
|
||||
GIT_REPOSITORY https://github.com/simon987/libmobi.git
|
||||
GIT_TAG "public"
|
||||
|
||||
UPDATE_COMMAND ""
|
||||
PATCH_COMMAND ""
|
||||
TEST_COMMAND ""
|
||||
CONFIGURE_COMMAND ./autogen.sh && ./configure
|
||||
INSTALL_COMMAND ""
|
||||
|
||||
PREFIX "third-party/ext_libmobi"
|
||||
SOURCE_DIR "third-party/ext_libmobi/src/libmobi"
|
||||
BINARY_DIR "third-party/ext_libmobi/src/libmobi"
|
||||
|
||||
BUILD_COMMAND ${MAKE_EXE} -j 8 --silent
|
||||
)
|
||||
|
||||
SET(MOBI_LIB_DIR ${CMAKE_CURRENT_BINARY_DIR}/third-party/ext_libmobi/src/libmobi/src/.libs/)
|
||||
SET(MOBI_INCLUDE_DIR ${CMAKE_CURRENT_BINARY_DIR}/third-party/ext_libmobi/src/libmobi/src/)
|
||||
|
||||
if (SIST_DEBUG)
|
||||
SET(FFMPEG_DEBUG "--enable-debug=3" "--disable-optimizations")
|
||||
else()
|
||||
SET(FFMPEG_DEBUG "")
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(
|
||||
ffmpeg
|
||||
GIT_REPOSITORY https://git.ffmpeg.org/ffmpeg.git
|
||||
GIT_TAG "n4.4"
|
||||
|
||||
UPDATE_COMMAND ""
|
||||
PATCH_COMMAND ""
|
||||
TEST_COMMAND ""
|
||||
CONFIGURE_COMMAND ./configure --disable-shared --enable-static --disable-ffmpeg --disable-ffplay
|
||||
--disable-ffprobe --disable-doc --disable-manpages --disable-postproc --disable-avfilter --disable-alsa
|
||||
--disable-lzma --disable-xlib --disable-vdpau --disable-vaapi --disable-sdl2
|
||||
--disable-network ${FFMPEG_DEBUG}
|
||||
INSTALL_COMMAND ""
|
||||
|
||||
PREFIX "third-party/ext_ffmpeg"
|
||||
SOURCE_DIR "third-party/ext_ffmpeg/src/ffmpeg"
|
||||
BINARY_DIR "third-party/ext_ffmpeg/src/ffmpeg"
|
||||
|
||||
BUILD_COMMAND ${MAKE_EXE} -j33 --silent
|
||||
)
|
||||
|
||||
SET(FFMPEG_LIB_DIR ${CMAKE_CURRENT_BINARY_DIR}/third-party/ext_ffmpeg/src/ffmpeg)
|
||||
SET(FFMPEG_INCLUDE_DIR ${CMAKE_CURRENT_BINARY_DIR}/third-party/ext_ffmpeg/src/ffmpeg)
|
||||
|
||||
ExternalProject_Add(
|
||||
libwpd
|
||||
URL http://prdownloads.sourceforge.net/libwpd/libwpd-0.9.9.tar.gz
|
||||
|
||||
UPDATE_COMMAND ""
|
||||
PATCH_COMMAND ""
|
||||
TEST_COMMAND ""
|
||||
CONFIGURE_COMMAND ./configure --without-docs --enable-static --disable-shared
|
||||
INSTALL_COMMAND ""
|
||||
|
||||
PREFIX "third-party/ext_libwpd"
|
||||
SOURCE_DIR "third-party/ext_libwpd/src/libwpd"
|
||||
BINARY_DIR "third-party/ext_libwpd/src/libwpd"
|
||||
|
||||
BUILD_COMMAND ${MAKE_EXE} -j33
|
||||
)
|
||||
SET(WPD_LIB_DIR ${CMAKE_CURRENT_BINARY_DIR}/third-party/ext_libwpd/src/libwpd/src/lib/.libs/)
|
||||
SET(WPD_INCLUDE_DIR ${CMAKE_CURRENT_BINARY_DIR}/third-party/ext_libwpd/src/libwpd/inc/)
|
||||
|
||||
add_dependencies(
|
||||
scan
|
||||
libmobi
|
||||
ffmpeg
|
||||
antiword
|
||||
libwpd
|
||||
)
|
||||
|
||||
target_link_libraries(
|
||||
scan
|
||||
PUBLIC
|
||||
|
||||
cjson
|
||||
${LibArchive_LIBRARIES}
|
||||
ZLIB::ZLIB
|
||||
BZip2::BZip2
|
||||
lz4::lz4
|
||||
${LZO2_LIB}
|
||||
LibLZMA::LibLZMA
|
||||
|
||||
${MUPDF_LIB}
|
||||
openjp2
|
||||
|
||||
${MOBI_LIB_DIR}/libmobi.a
|
||||
|
||||
${WPD_LIB_DIR}/libwpd-0.9.a
|
||||
${WPD_LIB_DIR}/libwpd-stream-0.9.a
|
||||
|
||||
${FREETYPE_LIB}
|
||||
${HARFBUZZ_LIB}
|
||||
${JBIG2DEC_LIB}
|
||||
|
||||
stdc++
|
||||
|
||||
-Wl,--whole-archive
|
||||
m
|
||||
-Wl,--no-whole-archive
|
||||
|
||||
${JPEG_LIBRARIES}
|
||||
${Tesseract_LIBRARIES}
|
||||
${LIBXML2_LIBRARIES}
|
||||
${FREETYPE_LIB}
|
||||
unofficial::brotli::brotlidec-static
|
||||
|
||||
${FFMPEG_LIB_DIR}/libavformat/libavformat.a
|
||||
${FFMPEG_LIB_DIR}/libavcodec/libavcodec.a
|
||||
${FFMPEG_LIB_DIR}/libavutil/libavutil.a
|
||||
${FFMPEG_LIB_DIR}/libswresample/libswresample.a
|
||||
${FFMPEG_LIB_DIR}/libswscale/libswscale.a
|
||||
|
||||
z
|
||||
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
|
||||
${RAW_LIB}
|
||||
${GOMP_LIB}
|
||||
${CMS_LIB}
|
||||
${JAS_LIB}
|
||||
${GUMBO_LIB}
|
||||
dl
|
||||
antiword
|
||||
unofficial::pcre::pcre unofficial::pcre::pcre16 unofficial::pcre::pcre32 unofficial::pcre::pcrecpp
|
||||
)
|
||||
|
||||
target_include_directories(
|
||||
scan
|
||||
PUBLIC
|
||||
${MUPDF_INC_DIR}
|
||||
${JPEG_INCLUDE_DIR}
|
||||
${LIBXML2_INCLUDE_DIR}
|
||||
${FFMPEG_INCLUDE_DIR}
|
||||
${MOBI_INCLUDE_DIR}
|
||||
${WPD_INCLUDE_DIR}
|
||||
)
|
||||
|
||||
if (BUILD_TESTS)
|
||||
find_package(GTest CONFIG REQUIRED)
|
||||
|
||||
add_executable(scan_ub_test test/main.cpp test/test_util.cpp test/test_util.h)
|
||||
target_compile_options(scan_ub_test PRIVATE -g -fsanitize=undefined -fno-omit-frame-pointer)
|
||||
target_link_libraries(scan_ub_test PRIVATE GTest::gtest GTest::gtest_main -fsanitize=undefined scan)
|
||||
|
||||
add_executable(scan_a_test test/main.cpp test/test_util.cpp test/test_util.h)
|
||||
target_compile_options(scan_a_test PRIVATE -g -fsanitize=address -fno-omit-frame-pointer)
|
||||
target_link_libraries(scan_a_test PRIVATE GTest::gtest GTest::gtest_main -fsanitize=address scan)
|
||||
|
||||
add_executable(scan_test test/main.cpp test/test_util.cpp test/test_util.h)
|
||||
target_compile_options(scan_test PRIVATE -g -fno-omit-frame-pointer)
|
||||
target_link_libraries(scan_test PRIVATE GTest::gtest GTest::gtest_main scan)
|
||||
endif()
|
||||
4
third-party/libscan/README.md
vendored
Normal file
4
third-party/libscan/README.md
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
### Run fuzz tests:
|
||||
```bash
|
||||
./scan_a_test --gtest_filter=*Fuzz* --gtest_repeat=100
|
||||
```
|
||||
244
third-party/libscan/libscan/arc/arc.c
vendored
Normal file
244
third-party/libscan/libscan/arc/arc.c
vendored
Normal file
@@ -0,0 +1,244 @@
|
||||
#include "arc.h"
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <fcntl.h>
|
||||
#include <openssl/evp.h>
|
||||
#include <pcre.h>
|
||||
|
||||
|
||||
int should_parse_filtered_file(const char *filepath, int ext) {
|
||||
char tmp[PATH_MAX * 2];
|
||||
|
||||
if (ext == 0) {
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
if (strncmp(filepath + ext, "tgz", 3) == 0) {
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
memcpy(tmp, filepath, ext - 1);
|
||||
*(tmp + ext - 1) = '\0';
|
||||
|
||||
char *idx = strrchr(tmp, '.');
|
||||
|
||||
if (idx == NULL) {
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
if (strcmp(idx, ".tar") == 0) {
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
void arc_close(struct vfile *f) {
|
||||
SHA1_Final(f->sha1_digest, &f->sha1_ctx);
|
||||
|
||||
if (f->rewind_buffer != NULL) {
|
||||
free(f->rewind_buffer);
|
||||
f->rewind_buffer = NULL;
|
||||
f->rewind_buffer_size = 0;
|
||||
f->rewind_buffer_cursor = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int arc_read(struct vfile *f, void *buf, size_t size) {
|
||||
|
||||
int bytes_copied = 0;
|
||||
|
||||
if (f->rewind_buffer_size != 0) {
|
||||
if (size > f->rewind_buffer_size) {
|
||||
memcpy(buf, f->rewind_buffer + f->rewind_buffer_cursor, f->rewind_buffer_size);
|
||||
|
||||
bytes_copied = f->rewind_buffer_size;
|
||||
size -= f->rewind_buffer_size;
|
||||
buf += f->rewind_buffer_size;
|
||||
f->rewind_buffer_size = 0;
|
||||
} else {
|
||||
memcpy(buf, f->rewind_buffer + f->rewind_buffer_cursor, size);
|
||||
f->rewind_buffer_size -= (int) size;
|
||||
f->rewind_buffer_cursor += (int) size;
|
||||
|
||||
return (int) size;
|
||||
}
|
||||
}
|
||||
|
||||
size_t bytes_read = archive_read_data(f->arc, buf, size);
|
||||
|
||||
if (bytes_read != 0 && bytes_read <= size && f->calculate_checksum) {
|
||||
f->has_checksum = TRUE;
|
||||
|
||||
safe_sha1_update(&f->sha1_ctx, (unsigned char *) buf, bytes_read);
|
||||
}
|
||||
|
||||
if (bytes_read != size && archive_errno(f->arc) != 0) {
|
||||
const char *error_str = archive_error_string(f->arc);
|
||||
if (error_str != NULL) {
|
||||
f->logf(f->filepath, LEVEL_ERROR, "Error reading archive file: %s", error_str);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
return (int) bytes_read + bytes_copied;
|
||||
}
|
||||
|
||||
int arc_read_rewindable(struct vfile *f, void *buf, size_t size) {
|
||||
|
||||
if (f->rewind_buffer != NULL) {
|
||||
fprintf(stderr, "Allocated rewind buffer more than once for %s", f->filepath);
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
size_t bytes_read = archive_read_data(f->arc, buf, size);
|
||||
|
||||
if (bytes_read != size && archive_errno(f->arc) != 0) {
|
||||
const char *error_str = archive_error_string(f->arc);
|
||||
if (error_str != NULL) {
|
||||
f->logf(f->filepath, LEVEL_ERROR, "Error reading archive file: %s", error_str);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
f->rewind_buffer = malloc(size);
|
||||
f->rewind_buffer_size = (int) size;
|
||||
f->rewind_buffer_cursor = 0;
|
||||
memcpy(f->rewind_buffer, buf, size);
|
||||
|
||||
return (int) bytes_read;
|
||||
}
|
||||
|
||||
int arc_open(scan_arc_ctx_t *ctx, vfile_t *f, struct archive **a, arc_data_t *arc_data, int allow_recurse) {
|
||||
arc_data->f = f;
|
||||
|
||||
if (f->is_fs_file) {
|
||||
*a = archive_read_new();
|
||||
archive_read_support_filter_all(*a);
|
||||
archive_read_support_format_all(*a);
|
||||
if (ctx->passphrase[0] != 0) {
|
||||
archive_read_add_passphrase(*a, ctx->passphrase);
|
||||
}
|
||||
|
||||
return archive_read_open_filename(*a, f->filepath, ARC_BUF_SIZE);
|
||||
} else if (allow_recurse) {
|
||||
*a = archive_read_new();
|
||||
archive_read_support_filter_all(*a);
|
||||
archive_read_support_format_all(*a);
|
||||
if (ctx->passphrase[0] != 0) {
|
||||
archive_read_add_passphrase(*a, ctx->passphrase);
|
||||
}
|
||||
|
||||
return archive_read_open(
|
||||
*a, arc_data,
|
||||
vfile_open_callback,
|
||||
vfile_read_callback,
|
||||
vfile_close_callback
|
||||
);
|
||||
} else {
|
||||
return ARC_SKIPPED;
|
||||
}
|
||||
}
|
||||
|
||||
static __thread int sub_strings[30];
|
||||
#define EXCLUDED(str) (pcre_exec(exclude, exclude_extra, str, strlen(str), 0, 0, sub_strings, sizeof(sub_strings)) >= 0)
|
||||
|
||||
scan_code_t parse_archive(scan_arc_ctx_t *ctx, vfile_t *f, document_t *doc, pcre *exclude, pcre_extra *exclude_extra) {
|
||||
|
||||
struct archive *a = NULL;
|
||||
struct archive_entry *entry = NULL;
|
||||
|
||||
arc_data_t arc_data;
|
||||
arc_data.f = f;
|
||||
|
||||
int ret = arc_open(ctx, f, &a, &arc_data, ctx->mode == ARC_MODE_RECURSE);
|
||||
if (ret == ARC_SKIPPED) {
|
||||
return SCAN_OK;
|
||||
}
|
||||
|
||||
if (ret != ARCHIVE_OK) {
|
||||
CTX_LOG_ERRORF(f->filepath, "(arc.c) [%d] %s", ret, archive_error_string(a))
|
||||
archive_read_free(a);
|
||||
return SCAN_ERR_READ;
|
||||
}
|
||||
|
||||
if (ctx->mode == ARC_MODE_LIST) {
|
||||
dyn_buffer_t buf = dyn_buffer_create();
|
||||
|
||||
while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
|
||||
if (S_ISREG(archive_entry_stat(entry)->st_mode)) {
|
||||
const char *utf8_name = archive_entry_pathname_utf8(entry);
|
||||
const char *file_path = utf8_name == NULL ? archive_entry_pathname(entry) : utf8_name;
|
||||
|
||||
dyn_buffer_append_string(&buf, file_path);
|
||||
dyn_buffer_write_char(&buf, ' ');
|
||||
}
|
||||
}
|
||||
dyn_buffer_write_char(&buf, '\0');
|
||||
|
||||
meta_line_t *meta_list = malloc(sizeof(meta_line_t) + buf.cur);
|
||||
meta_list->key = MetaContent;
|
||||
strcpy(meta_list->str_val, buf.buf);
|
||||
APPEND_META(doc, meta_list)
|
||||
dyn_buffer_destroy(&buf);
|
||||
|
||||
} else {
|
||||
|
||||
parse_job_t *sub_job = malloc(sizeof(parse_job_t) + PATH_MAX * 2);
|
||||
|
||||
sub_job->vfile.close = arc_close;
|
||||
sub_job->vfile.read = arc_read;
|
||||
sub_job->vfile.read_rewindable = arc_read_rewindable;
|
||||
sub_job->vfile.reset = NULL;
|
||||
sub_job->vfile.arc = a;
|
||||
sub_job->vfile.filepath = sub_job->filepath;
|
||||
sub_job->vfile.is_fs_file = FALSE;
|
||||
sub_job->vfile.rewind_buffer_size = 0;
|
||||
sub_job->vfile.rewind_buffer = NULL;
|
||||
sub_job->vfile.log = ctx->log;
|
||||
sub_job->vfile.logf = ctx->logf;
|
||||
sub_job->vfile.has_checksum = FALSE;
|
||||
sub_job->vfile.calculate_checksum = f->calculate_checksum;
|
||||
memcpy(sub_job->parent, doc->path_md5, MD5_DIGEST_LENGTH);
|
||||
|
||||
while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
|
||||
sub_job->vfile.info = *archive_entry_stat(entry);
|
||||
if (S_ISREG(sub_job->vfile.info.st_mode)) {
|
||||
|
||||
const char *utf8_name = archive_entry_pathname_utf8(entry);
|
||||
|
||||
if (utf8_name == NULL) {
|
||||
sprintf(sub_job->filepath, "%s#/%s", f->filepath, archive_entry_pathname(entry));
|
||||
} else {
|
||||
sprintf(sub_job->filepath, "%s#/%s", f->filepath, utf8_name);
|
||||
}
|
||||
sub_job->base = (int) (strrchr(sub_job->filepath, '/') - sub_job->filepath) + 1;
|
||||
|
||||
// Handle excludes
|
||||
if (exclude != NULL && EXCLUDED(sub_job->filepath)) {
|
||||
CTX_LOG_DEBUGF("arc.c", "Excluded: %s", sub_job->filepath)
|
||||
continue;
|
||||
}
|
||||
|
||||
char *p = strrchr(sub_job->filepath, '.');
|
||||
if (p != NULL && (p - sub_job->filepath) > strlen(f->filepath)) {
|
||||
sub_job->ext = (int) (p - sub_job->filepath + 1);
|
||||
} else {
|
||||
sub_job->ext = (int) strlen(sub_job->filepath);
|
||||
}
|
||||
|
||||
SHA1_Init(&sub_job->vfile.sha1_ctx);
|
||||
|
||||
ctx->parse(sub_job);
|
||||
}
|
||||
}
|
||||
|
||||
free(sub_job);
|
||||
}
|
||||
|
||||
archive_read_free(a);
|
||||
return SCAN_OK;
|
||||
}
|
||||
80
third-party/libscan/libscan/arc/arc.h
vendored
Normal file
80
third-party/libscan/libscan/arc/arc.h
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
#ifndef SCAN_ARC_H
|
||||
#define SCAN_ARC_H
|
||||
|
||||
#include <archive.h>
|
||||
#include <archive_entry.h>
|
||||
#include <fcntl.h>
|
||||
#include <pcre.h>
|
||||
#include "../scan.h"
|
||||
|
||||
# define ARC_SKIPPED (-1)
|
||||
#define ARC_MODE_SKIP 0
|
||||
#define ARC_MODE_LIST 1
|
||||
#define ARC_MODE_SHALLOW 2
|
||||
#define ARC_MODE_RECURSE 3
|
||||
typedef int archive_mode_t;
|
||||
|
||||
typedef struct {
|
||||
archive_mode_t mode;
|
||||
|
||||
parse_callback_t parse;
|
||||
log_callback_t log;
|
||||
logf_callback_t logf;
|
||||
store_callback_t store;
|
||||
char passphrase[4096];
|
||||
} scan_arc_ctx_t;
|
||||
|
||||
#define ARC_BUF_SIZE 8192
|
||||
|
||||
typedef struct {
|
||||
vfile_t *f;
|
||||
char buf[ARC_BUF_SIZE];
|
||||
} arc_data_t;
|
||||
|
||||
static int vfile_open_callback(struct archive *a, void *user_data) {
|
||||
arc_data_t *data = (arc_data_t *) user_data;
|
||||
|
||||
if (!data->f->is_fs_file) {
|
||||
SHA1_Init(&data->f->sha1_ctx);
|
||||
}
|
||||
|
||||
return ARCHIVE_OK;
|
||||
}
|
||||
|
||||
static long vfile_read_callback(struct archive *a, void *user_data, const void **buf) {
|
||||
arc_data_t *data = (arc_data_t *) user_data;
|
||||
|
||||
*buf = data->buf;
|
||||
long ret = data->f->read(data->f, data->buf, sizeof(data->buf));
|
||||
|
||||
if (!data->f->is_fs_file && ret > 0) {
|
||||
data->f->has_checksum = TRUE;
|
||||
safe_sha1_update(&data->f->sha1_ctx, (unsigned char*)data->buf, ret);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int vfile_close_callback(struct archive *a, void *user_data) {
|
||||
arc_data_t *data = (arc_data_t *) user_data;
|
||||
|
||||
if (!data->f->is_fs_file) {
|
||||
SHA1_Final((unsigned char *) data->f->sha1_digest, &data->f->sha1_ctx);
|
||||
}
|
||||
|
||||
return ARCHIVE_OK;
|
||||
}
|
||||
|
||||
int arc_open(scan_arc_ctx_t *ctx, vfile_t *f, struct archive **a, arc_data_t *arc_data, int allow_recurse);
|
||||
|
||||
int should_parse_filtered_file(const char *filepath, int ext);
|
||||
|
||||
scan_code_t parse_archive(scan_arc_ctx_t *ctx, vfile_t *f, document_t *doc, pcre *exclude, pcre_extra *exclude_extra);
|
||||
|
||||
int arc_read(struct vfile *f, void *buf, size_t size);
|
||||
|
||||
int arc_read_rewindable(struct vfile *f, void *buf, size_t size);
|
||||
|
||||
void arc_close(struct vfile *f);
|
||||
|
||||
#endif
|
||||
71
third-party/libscan/libscan/comic/comic.c
vendored
Normal file
71
third-party/libscan/libscan/comic/comic.c
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
#include "comic.h"
|
||||
#include "../media/media.h"
|
||||
#include "../arc/arc.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <archive.h>
|
||||
|
||||
static scan_arc_ctx_t arc_ctx = (scan_arc_ctx_t) {.passphrase = {0,}};
|
||||
|
||||
void parse_comic(scan_comic_ctx_t *ctx, vfile_t *f, document_t *doc) {
|
||||
struct archive *a = NULL;
|
||||
struct archive_entry *entry = NULL;
|
||||
arc_data_t arc_data;
|
||||
|
||||
if (!ctx->enable_tn) {
|
||||
return;
|
||||
}
|
||||
|
||||
int ret = arc_open(&arc_ctx, f, &a, &arc_data, TRUE);
|
||||
if (ret != ARCHIVE_OK) {
|
||||
CTX_LOG_ERRORF(f->filepath, "(cbr.c) [%d] %s", ret, archive_error_string(a))
|
||||
archive_read_free(a);
|
||||
return;
|
||||
}
|
||||
|
||||
while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
|
||||
struct stat info = *archive_entry_stat(entry);
|
||||
if (S_ISREG(info.st_mode)) {
|
||||
const char *utf8_name = archive_entry_pathname_utf8(entry);
|
||||
const char *file_path = utf8_name == NULL ? archive_entry_pathname(entry) : utf8_name;
|
||||
|
||||
char *p = strrchr(file_path, '.');
|
||||
if (p != NULL && (strcmp(p, ".png") == 0 || strcmp(p, ".jpg") == 0 || strcmp(p, ".jpeg") == 0)) {
|
||||
size_t entry_size = archive_entry_size(entry);
|
||||
void *buf = malloc(entry_size);
|
||||
size_t read = archive_read_data(a, buf, entry_size);
|
||||
|
||||
if (read != entry_size) {
|
||||
const char *err_str = archive_error_string(a);
|
||||
if (err_str) {
|
||||
CTX_LOG_ERRORF("comic.c", "Error while reading entry: %s", err_str)
|
||||
}
|
||||
free(buf);
|
||||
break;
|
||||
}
|
||||
|
||||
scan_media_ctx_t media_ctx = {
|
||||
.tn_count = ctx->enable_tn ? 1 : 0,
|
||||
.tn_size = ctx->tn_size,
|
||||
.tn_qscale = ctx->tn_qscale,
|
||||
.tesseract_lang = NULL,
|
||||
.tesseract_path = NULL,
|
||||
.read_subtitles = FALSE,
|
||||
.max_media_buffer = 0,
|
||||
.log = ctx->log,
|
||||
.logf = ctx->logf,
|
||||
.store = ctx->store,
|
||||
};
|
||||
|
||||
ret = store_image_thumbnail(&media_ctx, buf, entry_size, doc, file_path);
|
||||
free(buf);
|
||||
|
||||
if (ret == TRUE) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
archive_read_free(a);
|
||||
}
|
||||
32
third-party/libscan/libscan/comic/comic.h
vendored
Normal file
32
third-party/libscan/libscan/comic/comic.h
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
#ifndef SCAN_CBR_H
|
||||
#define SCAN_CBR_H
|
||||
|
||||
#include <stdlib.h>
|
||||
#include "../ebook/ebook.h"
|
||||
|
||||
typedef struct {
|
||||
log_callback_t log;
|
||||
logf_callback_t logf;
|
||||
store_callback_t store;
|
||||
|
||||
int enable_tn;
|
||||
int tn_size;
|
||||
float tn_qscale;
|
||||
|
||||
unsigned int cbr_mime;
|
||||
unsigned int cbz_mime;
|
||||
} scan_comic_ctx_t;
|
||||
|
||||
__always_inline
|
||||
static int is_cbr(scan_comic_ctx_t *ctx, unsigned int mime) {
|
||||
return mime == ctx->cbr_mime;
|
||||
}
|
||||
|
||||
__always_inline
|
||||
static int is_cbz(scan_comic_ctx_t *ctx, unsigned int mime) {
|
||||
return mime == ctx->cbz_mime;
|
||||
}
|
||||
|
||||
void parse_comic(scan_comic_ctx_t *ctx, vfile_t *f, document_t *doc);
|
||||
|
||||
#endif
|
||||
478
third-party/libscan/libscan/ebook/ebook.c
vendored
Normal file
478
third-party/libscan/libscan/ebook/ebook.c
vendored
Normal file
@@ -0,0 +1,478 @@
|
||||
#include "ebook.h"
|
||||
#include <mupdf/fitz.h>
|
||||
#include <pthread.h>
|
||||
#include <tesseract/capi.h>
|
||||
|
||||
#include "../media/media.h"
|
||||
#include "../arc/arc.h"
|
||||
#include "../ocr/ocr.h"
|
||||
|
||||
/* fill_image callback doesn't let us pass opaque pointers unless I create my own device */
|
||||
__thread text_buffer_t thread_buffer;
|
||||
__thread scan_ebook_ctx_t thread_ctx;
|
||||
|
||||
pthread_mutex_t Mutex;
|
||||
|
||||
static void my_fz_lock(UNUSED(void *user), int lock) {
|
||||
if (lock == FZ_LOCK_FREETYPE) {
|
||||
pthread_mutex_lock(&Mutex);
|
||||
}
|
||||
}
|
||||
|
||||
static void my_fz_unlock(UNUSED(void *user), int lock) {
|
||||
if (lock == FZ_LOCK_FREETYPE) {
|
||||
pthread_mutex_unlock(&Mutex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int pixmap_is_blank(const fz_pixmap *pixmap) {
|
||||
int pixmap_size = pixmap->n * pixmap->w * pixmap->h;
|
||||
const int pixel0 = pixmap->samples[0];
|
||||
for (int i = 0; i < pixmap_size; i++) {
|
||||
if (pixmap->samples[i] != pixel0) {
|
||||
return FALSE;
|
||||
}
|
||||
}
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
fz_pixmap *
|
||||
load_pixmap(scan_ebook_ctx_t *ctx, int page, fz_context *fzctx, fz_document *fzdoc, document_t *doc, fz_page **cover) {
|
||||
|
||||
int err = 0;
|
||||
|
||||
fz_var(cover);
|
||||
fz_var(err);
|
||||
fz_try(fzctx)*cover = fz_load_page(fzctx, fzdoc, page);
|
||||
fz_catch(fzctx)err = 1;
|
||||
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_load_page() returned error code [%d] %s", err, fzctx->error.message)
|
||||
return NULL;
|
||||
}
|
||||
|
||||
fz_rect bounds = fz_bound_page(fzctx, *cover);
|
||||
|
||||
float scale;
|
||||
float w = bounds.x1 - bounds.x0;
|
||||
float h = bounds.y1 - bounds.y0;
|
||||
if (w > h) {
|
||||
scale = (float) ctx->tn_size / w;
|
||||
} else {
|
||||
scale = (float) ctx->tn_size / h;
|
||||
}
|
||||
fz_matrix m = fz_scale(scale, scale);
|
||||
|
||||
bounds = fz_transform_rect(bounds, m);
|
||||
fz_irect bbox = fz_round_rect(bounds);
|
||||
fz_pixmap *pixmap = fz_new_pixmap_with_bbox(fzctx, fz_device_rgb(fzctx), bbox, NULL, 0);
|
||||
|
||||
fz_clear_pixmap_with_value(fzctx, pixmap, 0xFF);
|
||||
fz_device *dev = fz_new_draw_device(fzctx, m, pixmap);
|
||||
|
||||
fz_var(err);
|
||||
fz_try(fzctx) {
|
||||
fz_run_page(fzctx, *cover, dev, fz_identity, NULL);
|
||||
} fz_always(fzctx) {
|
||||
fz_close_device(fzctx, dev);
|
||||
fz_drop_device(fzctx, dev);
|
||||
} fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_run_page() returned error code [%d] %s", err, fzctx->error.message)
|
||||
fz_drop_page(fzctx, *cover);
|
||||
fz_drop_pixmap(fzctx, pixmap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (pixmap->n != 3) {
|
||||
CTX_LOG_ERRORF(doc->filepath, "Got unexpected pixmap depth: %d", pixmap->n)
|
||||
fz_drop_page(fzctx, *cover);
|
||||
fz_drop_pixmap(fzctx, pixmap);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return pixmap;
|
||||
}
|
||||
|
||||
int render_cover(scan_ebook_ctx_t *ctx, fz_context *fzctx, document_t *doc, fz_document *fzdoc) {
|
||||
|
||||
fz_page *cover = NULL;
|
||||
fz_pixmap *pixmap = load_pixmap(ctx, 0, fzctx, fzdoc, doc, &cover);
|
||||
if (pixmap == NULL) {
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
if (pixmap_is_blank(pixmap)) {
|
||||
fz_drop_page(fzctx, cover);
|
||||
fz_drop_pixmap(fzctx, pixmap);
|
||||
CTX_LOG_DEBUG(doc->filepath, "Cover page is blank, using page 1 instead")
|
||||
pixmap = load_pixmap(ctx, 1, fzctx, fzdoc, doc, &cover);
|
||||
if (pixmap == NULL) {
|
||||
return FALSE;
|
||||
}
|
||||
}
|
||||
|
||||
// RGB24 -> YUV420p
|
||||
AVFrame *scaled_frame = av_frame_alloc();
|
||||
|
||||
struct SwsContext *sws_ctx = sws_getContext(
|
||||
pixmap->w, pixmap->h, AV_PIX_FMT_RGB24,
|
||||
pixmap->w, pixmap->h, AV_PIX_FMT_YUV420P,
|
||||
SIST_SWS_ALGO, 0, 0, 0
|
||||
);
|
||||
|
||||
int dst_buf_len = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pixmap->w, pixmap->h, 1);
|
||||
uint8_t *dst_buf = (uint8_t *) av_malloc(dst_buf_len);
|
||||
|
||||
av_image_fill_arrays(scaled_frame->data, scaled_frame->linesize, dst_buf, AV_PIX_FMT_YUV420P, pixmap->w, pixmap->h,
|
||||
1);
|
||||
|
||||
unsigned char *samples = calloc(1, 1024 * 1024 * 1024);
|
||||
memcpy(samples, pixmap->samples, pixmap->stride * pixmap->h);
|
||||
|
||||
const uint8_t *in_data[1] = {samples,};
|
||||
int in_line_size[1] = {(int) pixmap->stride};
|
||||
|
||||
sws_scale(sws_ctx,
|
||||
in_data, in_line_size,
|
||||
0, pixmap->h,
|
||||
scaled_frame->data, scaled_frame->linesize
|
||||
);
|
||||
|
||||
scaled_frame->width = pixmap->w;
|
||||
scaled_frame->height = pixmap->h;
|
||||
scaled_frame->format = AV_PIX_FMT_YUV420P;
|
||||
|
||||
sws_freeContext(sws_ctx);
|
||||
|
||||
// YUV420p -> JPEG
|
||||
AVCodecContext *jpeg_encoder = alloc_jpeg_encoder(pixmap->w, pixmap->h, ctx->tn_qscale);
|
||||
avcodec_send_frame(jpeg_encoder, scaled_frame);
|
||||
|
||||
AVPacket jpeg_packet;
|
||||
av_init_packet(&jpeg_packet);
|
||||
avcodec_receive_packet(jpeg_encoder, &jpeg_packet);
|
||||
|
||||
APPEND_LONG_META(doc, MetaThumbnail, 1)
|
||||
ctx->store((char *) doc->path_md5, sizeof(doc->path_md5), (char *) jpeg_packet.data, jpeg_packet.size);
|
||||
|
||||
free(samples);
|
||||
av_packet_unref(&jpeg_packet);
|
||||
av_free(*scaled_frame->data);
|
||||
av_frame_free(&scaled_frame);
|
||||
avcodec_free_context(&jpeg_encoder);
|
||||
|
||||
fz_drop_pixmap(fzctx, pixmap);
|
||||
fz_drop_page(fzctx, cover);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
void fz_err_callback(void *user, const char *message) {
|
||||
document_t *doc = (document_t *) user;
|
||||
|
||||
const scan_ebook_ctx_t *ctx = &thread_ctx;
|
||||
CTX_LOG_WARNINGF(doc->filepath, "FZ: %s", message)
|
||||
}
|
||||
|
||||
void fz_warn_callback(void *user, const char *message) {
|
||||
document_t *doc = (document_t *) user;
|
||||
|
||||
const scan_ebook_ctx_t *ctx = &thread_ctx;
|
||||
CTX_LOG_DEBUGF(doc->filepath, "FZ: %s", message)
|
||||
}
|
||||
|
||||
static void init_fzctx(fz_context *fzctx, document_t *doc) {
|
||||
fz_register_document_handlers(fzctx);
|
||||
|
||||
static int mu_is_initialized = FALSE;
|
||||
if (!mu_is_initialized) {
|
||||
pthread_mutex_init(&Mutex, NULL);
|
||||
mu_is_initialized = TRUE;
|
||||
}
|
||||
|
||||
fzctx->warn.print_user = doc;
|
||||
fzctx->warn.print = fz_warn_callback;
|
||||
fzctx->error.print_user = doc;
|
||||
fzctx->error.print = fz_err_callback;
|
||||
|
||||
fzctx->locks.lock = my_fz_lock;
|
||||
fzctx->locks.unlock = my_fz_unlock;
|
||||
}
|
||||
|
||||
static int read_stext_block(fz_stext_block *block, text_buffer_t *tex) {
|
||||
if (block->type != FZ_STEXT_BLOCK_TEXT) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fz_stext_line *line = block->u.t.first_line;
|
||||
while (line != NULL) {
|
||||
text_buffer_append_char(tex, ' ');
|
||||
fz_stext_char *c = line->first_char;
|
||||
while (c != NULL) {
|
||||
if (text_buffer_append_char(tex, c->c) == TEXT_BUF_FULL) {
|
||||
return TEXT_BUF_FULL;
|
||||
}
|
||||
c = c->next;
|
||||
}
|
||||
line = line->next;
|
||||
}
|
||||
text_buffer_append_char(tex, ' ');
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void fill_image_ocr_cb(const char* text, size_t len) {
|
||||
text_buffer_append_string(&thread_buffer, text, len - 1);
|
||||
}
|
||||
|
||||
void fill_image(fz_context *fzctx, UNUSED(fz_device *dev),
|
||||
fz_image *img, UNUSED(fz_matrix ctm), UNUSED(float alpha),
|
||||
UNUSED(fz_color_params color_params)) {
|
||||
|
||||
int l2factor = 0;
|
||||
|
||||
if (img->w >= MIN_OCR_WIDTH && img->h >= MIN_OCR_HEIGHT && OCR_IS_VALID_BPP(img->n)) {
|
||||
fz_pixmap *pix = img->get_pixmap(fzctx, img, NULL, img->w, img->h, &l2factor);
|
||||
ocr_extract_text(thread_ctx.tesseract_path, thread_ctx.tesseract_lang, pix->samples, pix->w, pix->h, pix->n, pix->stride, pix->xres, fill_image_ocr_cb);
|
||||
fz_drop_pixmap(fzctx, pix);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
parse_ebook_mem(scan_ebook_ctx_t *ctx, void *buf, size_t buf_len, const char *mime_str, document_t *doc, int tn_only) {
|
||||
|
||||
fz_context *fzctx = fz_new_context(NULL, NULL, FZ_STORE_DEFAULT);
|
||||
thread_ctx = *ctx;
|
||||
|
||||
init_fzctx(fzctx, doc);
|
||||
|
||||
int err = 0;
|
||||
|
||||
fz_document *fzdoc = NULL;
|
||||
fz_stream *stream = NULL;
|
||||
fz_var(fzdoc);
|
||||
fz_var(stream);
|
||||
fz_var(err);
|
||||
|
||||
fz_try(fzctx) {
|
||||
stream = fz_open_memory(fzctx, buf, buf_len);
|
||||
fzdoc = fz_open_document_with_stream(fzctx, mime_str, stream);
|
||||
} fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
|
||||
if (err != 0) {
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
|
||||
int page_count = -1;
|
||||
fz_var(err);
|
||||
fz_try(fzctx)page_count = fz_count_pages(fzctx, fzdoc);
|
||||
fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
|
||||
if (err) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_count_pages() returned error code [%d] %s", err, fzctx->error.message)
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
|
||||
APPEND_LONG_META(doc, MetaPages, page_count)
|
||||
|
||||
if (ctx->enable_tn) {
|
||||
if (render_cover(ctx, fzctx, doc, fzdoc) == FALSE) {
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (tn_only) {
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
|
||||
char title[8192] = {'\0',};
|
||||
fz_try(fzctx)fz_lookup_metadata(fzctx, fzdoc, FZ_META_INFO_TITLE, title, sizeof(title));
|
||||
fz_catch(fzctx);
|
||||
|
||||
if (strlen(title) > 0) {
|
||||
APPEND_UTF8_META(doc, MetaTitle, title)
|
||||
}
|
||||
|
||||
char author[4096] = {'\0',};
|
||||
fz_try(fzctx)fz_lookup_metadata(fzctx, fzdoc, FZ_META_INFO_AUTHOR, author, sizeof(author));
|
||||
fz_catch(fzctx);
|
||||
|
||||
if (strlen(author) > 0) {
|
||||
APPEND_UTF8_META(doc, MetaAuthor, author)
|
||||
}
|
||||
|
||||
|
||||
if (ctx->content_size > 0) {
|
||||
fz_stext_options opts = {0};
|
||||
thread_buffer = text_buffer_create(ctx->content_size);
|
||||
|
||||
for (int current_page = 0; current_page < page_count; current_page++) {
|
||||
fz_page *page = NULL;
|
||||
fz_var(err);
|
||||
fz_try(fzctx)page = fz_load_page(fzctx, fzdoc, current_page);
|
||||
fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_load_page() returned error code [%d] %s", err, fzctx->error.message)
|
||||
text_buffer_destroy(&thread_buffer);
|
||||
fz_drop_page(fzctx, page);
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
|
||||
fz_stext_page *stext = fz_new_stext_page(fzctx, fz_bound_page(fzctx, page));
|
||||
fz_device *dev = fz_new_stext_device(fzctx, stext, &opts);
|
||||
dev->stroke_path = NULL;
|
||||
dev->stroke_text = NULL;
|
||||
dev->clip_text = NULL;
|
||||
dev->clip_stroke_path = NULL;
|
||||
dev->clip_stroke_text = NULL;
|
||||
|
||||
if (ctx->tesseract_lang != NULL) {
|
||||
dev->fill_image = fill_image;
|
||||
}
|
||||
|
||||
fz_var(err);
|
||||
fz_try(fzctx)fz_run_page(fzctx, page, dev, fz_identity, NULL);
|
||||
fz_always(fzctx) {
|
||||
fz_close_device(fzctx, dev);
|
||||
fz_drop_device(fzctx, dev);
|
||||
} fz_catch(fzctx)err = fzctx->error.errcode;
|
||||
|
||||
if (err != 0) {
|
||||
CTX_LOG_WARNINGF(doc->filepath, "fz_run_page() returned error code [%d] %s", err, fzctx->error.message)
|
||||
text_buffer_destroy(&thread_buffer);
|
||||
fz_drop_page(fzctx, page);
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
return;
|
||||
}
|
||||
|
||||
fz_stext_block *block = stext->first_block;
|
||||
while (block != NULL) {
|
||||
int ret = read_stext_block(block, &thread_buffer);
|
||||
if (ret == TEXT_BUF_FULL) {
|
||||
break;
|
||||
}
|
||||
block = block->next;
|
||||
}
|
||||
fz_drop_stext_page(fzctx, stext);
|
||||
fz_drop_page(fzctx, page);
|
||||
|
||||
if (thread_buffer.dyn_buffer.cur >= ctx->content_size) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
text_buffer_terminate_string(&thread_buffer);
|
||||
|
||||
meta_line_t *meta_content = malloc(sizeof(meta_line_t) + thread_buffer.dyn_buffer.cur);
|
||||
meta_content->key = MetaContent;
|
||||
memcpy(meta_content->str_val, thread_buffer.dyn_buffer.buf, thread_buffer.dyn_buffer.cur);
|
||||
APPEND_META(doc, meta_content)
|
||||
|
||||
text_buffer_destroy(&thread_buffer);
|
||||
}
|
||||
|
||||
fz_drop_stream(fzctx, stream);
|
||||
fz_drop_document(fzctx, fzdoc);
|
||||
fz_drop_context(fzctx);
|
||||
}
|
||||
|
||||
static scan_arc_ctx_t arc_ctx = (scan_arc_ctx_t) {.passphrase = {0,}};
|
||||
|
||||
void parse_epub_fast(scan_ebook_ctx_t *ctx, vfile_t *f, document_t *doc) {
|
||||
struct archive *a = NULL;
|
||||
struct archive_entry *entry = NULL;
|
||||
arc_data_t arc_data;
|
||||
|
||||
text_buffer_t content_buffer = text_buffer_create(ctx->content_size);
|
||||
|
||||
if (!ctx->enable_tn) {
|
||||
return;
|
||||
}
|
||||
|
||||
int ret = arc_open(&arc_ctx, f, &a, &arc_data, TRUE);
|
||||
if (ret != ARCHIVE_OK) {
|
||||
CTX_LOG_ERRORF(f->filepath, "(ebook.c) [%d] %s", ret, archive_error_string(a))
|
||||
archive_read_free(a);
|
||||
return;
|
||||
}
|
||||
|
||||
    while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
        struct stat info = *archive_entry_stat(entry);
        if (S_ISREG(info.st_mode)) {
            const char *utf8_name = archive_entry_pathname_utf8(entry);
            const char *file_path = utf8_name == NULL ? archive_entry_pathname(entry) : utf8_name;

            char *p = strrchr(file_path, '.');
            if (p != NULL && (strcmp(p, ".html") == 0 || (strcmp(p, ".xhtml") == 0))) {
                size_t entry_size = archive_entry_size(entry);
                void *buf = malloc(entry_size + 1);
                size_t read = archive_read_data(a, buf, entry_size);
                *(char *) (buf + entry_size) = '\0';

                if (read != entry_size) {
                    const char *err_str = archive_error_string(a);
                    if (err_str) {
                        CTX_LOG_ERRORF("ebook.c", "Error while reading entry: %s", err_str)
                    }
                    free(buf);
                    break;
                }

                ret = text_buffer_append_markup(&content_buffer, buf);
                free(buf);

                if (ret == TEXT_BUF_FULL) {
                    break;
                }
            }
        }
    }

    text_buffer_terminate_string(&content_buffer);

    meta_line_t *meta_content = malloc(sizeof(meta_line_t) + content_buffer.dyn_buffer.cur);
    meta_content->key = MetaContent;
    memcpy(meta_content->str_val, content_buffer.dyn_buffer.buf, content_buffer.dyn_buffer.cur);
    APPEND_META(doc, meta_content)

    text_buffer_destroy(&content_buffer);

    archive_read_free(a);
}

void parse_ebook(scan_ebook_ctx_t *ctx, vfile_t *f, const char *mime_str, document_t *doc) {

    if (ctx->fast_epub_parse && is_epub(mime_str)) {
        parse_epub_fast(ctx, f, doc);
        return;
    }

    size_t buf_len;
    void *buf = read_all(f, &buf_len);
    if (buf == NULL) {
        CTX_LOG_ERROR(f->filepath, "read_all() failed")
        return;
    }

    parse_ebook_mem(ctx, buf, buf_len, mime_str, doc, FALSE);
    free(buf);
}

31
third-party/libscan/libscan/ebook/ebook.h
vendored
Normal file
@@ -0,0 +1,31 @@
#ifndef SCAN_EBOOK_H
#define SCAN_EBOOK_H

#include "../scan.h"

typedef struct {
    long content_size;
    int tn_size;
    int enable_tn;
    const char *tesseract_lang;
    const char *tesseract_path;
    pthread_mutex_t mupdf_mutex;

    log_callback_t log;
    logf_callback_t logf;
    store_callback_t store;
    int fast_epub_parse;
    float tn_qscale;
} scan_ebook_ctx_t;

void parse_ebook(scan_ebook_ctx_t *ctx, vfile_t *f, const char *mime_str, document_t *doc);

void
parse_ebook_mem(scan_ebook_ctx_t *ctx, void *buf, size_t buf_len, const char *mime_str, document_t *doc, int tn_only);

__always_inline
static int is_epub(const char *mime_string) {
    return strcmp(mime_string, "application/epub+zip") == 0;
}

#endif

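For reference, a minimal sketch of how a caller could fill this context before handing a file to parse_ebook(). The helper name scan_one_ebook and all field values are illustrative only; the vfile_t/document_t plumbing and the log/logf/store callbacks are assumed to come from the rest of libscan, as declared in scan.h.

// Illustrative sketch (not part of the diff): configure scan_ebook_ctx_t and call parse_ebook().
// Assumes the libscan types and callback typedefs declared in scan.h/ebook.h above; values are examples.
#include <pthread.h>
#include "ebook.h" // assumes compilation inside third-party/libscan/libscan/ebook/

static void scan_one_ebook(vfile_t *f, const char *mime, document_t *doc,
                           log_callback_t log, logf_callback_t logf, store_callback_t store) {
    scan_ebook_ctx_t ctx = {
            .content_size = 32768,   // example: keep at most 32 KiB of extracted text
            .tn_size = 500,          // example thumbnail size
            .tn_qscale = 2.0f,       // example thumbnail quality factor
            .enable_tn = 0,          // skip thumbnail generation in this sketch
            .tesseract_lang = NULL,  // no OCR
            .tesseract_path = NULL,
            .fast_epub_parse = 1,    // route application/epub+zip through parse_epub_fast()
            .log = log,
            .logf = logf,
            .store = store,
    };
    pthread_mutex_init(&ctx.mupdf_mutex, NULL);

    parse_ebook(&ctx, f, mime, doc);

    pthread_mutex_destroy(&ctx.mupdf_mutex);
}
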
246
third-party/libscan/libscan/font/font.c
vendored
Normal file
@@ -0,0 +1,246 @@
#include "font.h"

#include <ft2build.h>
#include <freetype/freetype.h>
#include "../util.h"


__thread FT_Library ft_lib = NULL;


typedef struct text_dimensions {
    unsigned int width;
    unsigned int height;
    unsigned int baseline;
} text_dimensions_t;

typedef struct glyph {
    int top;
    int height;
    int width;
    int descent;
    int ascent;
    int advance_width;
    unsigned char *pixmap;
} glyph_t;


__always_inline
int kerning_offset(char c, char pc, FT_Face face) {
    FT_Vector kerning;
    FT_Get_Kerning(face, c, pc, FT_KERNING_DEFAULT, &kerning);

    return (int) (kerning.x / 64);
}

__always_inline
glyph_t ft_glyph_to_glyph(FT_GlyphSlot slot) {
    glyph_t glyph;

    glyph.pixmap = slot->bitmap.buffer;

    glyph.width = (int) slot->bitmap.width;
    glyph.height = (int) slot->bitmap.rows;
    glyph.top = slot->bitmap_top;
    glyph.advance_width = (int) slot->advance.x / 64;

    glyph.descent = MAX(0, glyph.height - glyph.top);
    glyph.ascent = MAX(0, MAX(glyph.top, glyph.height) - glyph.descent);

    return glyph;
}

text_dimensions_t text_dimension(char *text, FT_Face face) {
    text_dimensions_t dimensions;

    dimensions.width = 0;

    int num_chars = (int) strlen(text);

    unsigned int max_ascent = 0;
    int max_descent = 0;

    char pc = 0;
    for (int i = 0; i < num_chars; i++) {
        char c = text[i];

        FT_Load_Char(face, c, 0);
        glyph_t glyph = ft_glyph_to_glyph(face->glyph);

        max_descent = MAX(max_descent, glyph.descent);
        max_ascent = MAX(max_ascent, MAX(glyph.height, glyph.ascent));

        int kerning_x = kerning_offset(c, pc, face);
        dimensions.width += MAX(glyph.advance_width, glyph.width) + kerning_x;

        pc = c;
    }

    dimensions.height = max_ascent + max_descent;
    dimensions.baseline = max_descent;

    return dimensions;
}

void draw_glyph(glyph_t *glyph, int x, int y, struct text_dimensions text_info, unsigned char *bitmap) {
    unsigned int src = 0;
    unsigned int dst = y * text_info.width + x;
    unsigned int row_offset = text_info.width - glyph->width;
    unsigned int buf_len = text_info.width * text_info.height;

    for (unsigned int sy = 0; sy < glyph->height; sy++) {
        for (unsigned int sx = 0; sx < glyph->width; sx++) {
            if (dst < buf_len) {
                bitmap[dst] |= glyph->pixmap[src];
            }
            src++;
            dst++;
        }
        dst += row_offset;
    }
}

void bmp_format(dyn_buffer_t *buf, text_dimensions_t dimensions, const unsigned char *bitmap) {

    dyn_buffer_write_short(buf, 0x4D42); // Magic
    dyn_buffer_write_int(buf, 0); // Size placeholder
    dyn_buffer_write_int(buf, 0x5157); // Reserved
    dyn_buffer_write_int(buf, 14 + 40 + 256 * 4); // pixels offset

    dyn_buffer_write_int(buf, 40); // DIB size
    dyn_buffer_write_int(buf, (int) dimensions.width);
    dyn_buffer_write_int(buf, (int) dimensions.height);
    dyn_buffer_write_short(buf, 1); // Color planes
    dyn_buffer_write_short(buf, 8); // bits per pixel
    dyn_buffer_write_int(buf, 0); // compression
    dyn_buffer_write_int(buf, 0); // Ignored
    dyn_buffer_write_int(buf, 3800); // hres
    dyn_buffer_write_int(buf, 3800); // vres
    dyn_buffer_write_int(buf, 256); // Color count
    dyn_buffer_write_int(buf, 0); // Ignored

    // RGBA32 Color table (Grayscale)
    for (int i = 255; i >= 0; i--) {
        dyn_buffer_write_int(buf, i + (i << 8) + (i << 16));
    }

    // Pixel array: write from bottom to top, with rows padded to multiples of 4-bytes
    for (int y = (int) dimensions.height - 1; y >= 0; y--) {
        for (unsigned int x = 0; x < dimensions.width; x++) {
            dyn_buffer_write_char(buf, (char) bitmap[y * dimensions.width + x]);
        }
        while (buf->cur % 4 != 0) {
            dyn_buffer_write_char(buf, 0);
        }
    }

    // Size
    *(int *) ((char *) buf->buf + 2) = buf->cur;
}

void parse_font(scan_font_ctx_t *ctx, vfile_t *f, document_t *doc) {
    if (ft_lib == NULL) {
        FT_Init_FreeType(&ft_lib);
    }

    size_t buf_len = 0;
    void *buf = read_all(f, &buf_len);
    if (buf == NULL) {
        CTX_LOG_ERROR(f->filepath, "read_all() failed")
        return;
    }

    FT_Face face;
    FT_Error err = FT_New_Memory_Face(ft_lib, (unsigned char *) buf, (int) buf_len, 0, &face);
    if (err != 0) {
        CTX_LOG_ERRORF(doc->filepath, "(font.c) FT_New_Memory_Face() returned error code [%d] %s", err,
                       FT_Error_String(err))
        free(buf);
        return;
    }

    char font_name[4096];

    if (face->style_name == NULL || (strcmp(face->style_name, "?") == 0)) {
        if (face->family_name == NULL) {
            strcpy(font_name, "(null)");
        } else {
            strncpy(font_name, face->family_name, sizeof(font_name));
        }
    } else {
        snprintf(font_name, sizeof(font_name), "%s %s", face->family_name, face->style_name);
    }

    meta_line_t *meta_name = malloc(sizeof(meta_line_t) + strlen(font_name));
    meta_name->key = MetaFontName;
    strcpy(meta_name->str_val, font_name);
    APPEND_META(doc, meta_name)

    if (!ctx->enable_tn) {
        FT_Done_Face(face);
        free(buf);
        return;
    }

    int pixel = 64;
    int num_chars = (int) strlen(font_name);

    err = FT_Set_Pixel_Sizes(face, 0, pixel);
    if (err != 0) {
        CTX_LOG_WARNINGF(doc->filepath, "(font.c) FT_Set_Pixel_Sizes() returned error code [%d] %s", err,
                         FT_Error_String(err))
        FT_Done_Face(face);
        free(buf);
        return;
    }

    text_dimensions_t dimensions = text_dimension(font_name, face);
    unsigned char *bitmap = calloc(dimensions.width * dimensions.height, 1);

    FT_Vector pen;
    pen.x = 0;

    char pc = 0;
    for (int i = 0; i < num_chars; i++) {
        char c = font_name[i];

        err = FT_Load_Char(face, c, FT_LOAD_NO_HINTING | FT_LOAD_RENDER);
        if (err != 0) {
            c = c >= 'a' && c <= 'z' ? c - 32 : c + 32;
            err = FT_Load_Char(face, c, FT_LOAD_NO_HINTING | FT_LOAD_RENDER);
            if (err != 0) {
                CTX_LOG_WARNINGF(doc->filepath, "(font.c) FT_Load_Char() returned error code [%d] %s", err,
                                 FT_Error_String(err))
                continue;
            }
        }
        glyph_t glyph = ft_glyph_to_glyph(face->glyph);

        pen.x += kerning_offset(c, pc, face);
        if (pen.x <= 0) {
            pen.x = ABS(glyph.advance_width - glyph.width);
        }
        pen.y = dimensions.height - glyph.ascent - dimensions.baseline;

        draw_glyph(&glyph, pen.x, pen.y, dimensions, bitmap);

        pen.x += glyph.advance_width;
        pc = c;
    }

    dyn_buffer_t bmp_data = dyn_buffer_create();
    bmp_format(&bmp_data, dimensions, bitmap);

    APPEND_LONG_META(doc, MetaThumbnail, 1)
    ctx->store((char *) doc->path_md5, sizeof(doc->path_md5), (char *) bmp_data.buf, bmp_data.cur);

    dyn_buffer_destroy(&bmp_data);
    free(bitmap);

    FT_Done_Face(face);
    free(buf);
}

void cleanup_font() {
    FT_Done_FreeType(ft_lib);
}

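The bmp_format() helper above serializes the rendered name as an uncompressed 8-bit grayscale BMP: a 14-byte file header, a 40-byte DIB header, a 256-entry grayscale palette, then pixel rows written bottom-up and padded to 4-byte boundaries, with the total size patched back into offset 2 at the end. Below is a standalone sketch of the same layout using plain stdio instead of libscan's dyn_buffer_t; the output file name and gradient pixels are made up for illustration, and a little-endian host is assumed.

// Illustrative sketch (not part of the diff): write a tiny 8-bit grayscale BMP with the
// same layout bmp_format() produces (assumes a little-endian host so fwrite of native
// integers yields the little-endian fields BMP expects).
#include <stdio.h>
#include <stdint.h>

static void put_u16(FILE *f, uint16_t v) { fwrite(&v, 2, 1, f); }
static void put_u32(FILE *f, uint32_t v) { fwrite(&v, 4, 1, f); }

int main(void) {
    const unsigned int w = 6, h = 4;
    unsigned char pixels[4][6];
    for (unsigned int y = 0; y < h; y++)
        for (unsigned int x = 0; x < w; x++)
            pixels[y][x] = (unsigned char) (x * 255 / (w - 1)); // simple horizontal gradient

    FILE *f = fopen("gradient.bmp", "wb"); // arbitrary output name
    if (f == NULL) return 1;

    uint32_t pixel_offset = 14 + 40 + 256 * 4;   // file header + DIB header + palette
    uint32_t row_size = (w + 3) & ~3u;           // rows padded to 4-byte multiples
    uint32_t file_size = pixel_offset + row_size * h;

    put_u16(f, 0x4D42);        // "BM" magic
    put_u32(f, file_size);     // total size (bmp_format() back-patches this field instead)
    put_u32(f, 0);             // reserved
    put_u32(f, pixel_offset);  // offset of the pixel array

    put_u32(f, 40);            // DIB (BITMAPINFOHEADER) size
    put_u32(f, w);
    put_u32(f, h);
    put_u16(f, 1);             // color planes
    put_u16(f, 8);             // bits per pixel
    put_u32(f, 0);             // no compression
    put_u32(f, 0);             // image size (may be 0 for uncompressed)
    put_u32(f, 3800);          // horizontal resolution
    put_u32(f, 3800);          // vertical resolution
    put_u32(f, 256);           // palette entries
    put_u32(f, 0);             // important colors

    for (int i = 255; i >= 0; i--)               // grayscale palette, same order as bmp_format()
        put_u32(f, (uint32_t) (i | (i << 8) | (i << 16)));

    unsigned char pad[3] = {0, 0, 0};
    for (int y = (int) h - 1; y >= 0; y--) {     // rows written bottom-up
        fwrite(pixels[y], 1, w, f);
        fwrite(pad, 1, row_size - w, f);         // pad row to a 4-byte boundary
    }

    fclose(f);
    return 0;
}
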
17
third-party/libscan/libscan/font/font.h
vendored
Normal file
@@ -0,0 +1,17 @@
#ifndef SCAN_FONT_H
#define SCAN_FONT_H

#include "../scan.h"


typedef struct {
    int enable_tn;
    log_callback_t log;
    logf_callback_t logf;
    store_callback_t store;
} scan_font_ctx_t;

void parse_font(scan_font_ctx_t *ctx, vfile_t *f, document_t *doc);
void cleanup_font();

#endif

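parse_font() hands the finished thumbnail to ctx->store, keyed by the document's path_md5 (see the ctx->store(...) call in font.c above). The sketch below shows a store callback shaped like that call site, taking (key, key_len, value, value_len); the signature is inferred from the call, and the hex file name scheme and helper name are only an example.

// Illustrative sketch (not part of the diff): a store callback matching the
// (key, key_len, value, value_len) shape used by ctx->store(...) in font.c.
// The output path scheme is arbitrary.
#include <stdio.h>
#include <stddef.h>

static void store_thumbnail_to_disk(char *key, size_t key_len, char *value, size_t value_len) {
    char name[512];
    char *p = name;

    // Hex-encode the binary key (e.g. the path_md5 digest) into a file name
    for (size_t i = 0; i < key_len && p < name + sizeof(name) - 8; i++) {
        p += sprintf(p, "%02x", (unsigned char) key[i]);
    }
    sprintf(p, ".bmp");

    FILE *f = fopen(name, "wb");
    if (f == NULL) {
        return;
    }
    fwrite(value, 1, value_len, f);
    fclose(f);
}

A scan_font_ctx_t could then be configured with enable_tn = 1, the application's log/logf callbacks, and store = store_thumbnail_to_disk before calling parse_font(); since ft_lib is declared __thread, cleanup_font() would be called once per scanning thread when it finishes.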
Some files were not shown because too many files have changed in this diff.