From 1cfceba51891b28e953a4fa1460c082d1927b0bb Mon Sep 17 00:00:00 2001 From: simon987 Date: Tue, 25 Apr 2023 08:49:50 -0400 Subject: [PATCH 1/3] wip --- CMakeLists.txt | 2 +- README.md | 2 +- src/cli.c | 32 +++++++++++++ src/cli.h | 11 +++++ src/database/database.c | 24 +--------- src/database/database.h | 13 +++-- src/database/database_fts.c | 88 ++++++++++++++++++++++++++++++++++ src/database/database_schema.c | 42 ++++++++++++++++ src/main.c | 38 ++++++++++++++- 9 files changed, 223 insertions(+), 29 deletions(-) create mode 100644 src/database/database_fts.c diff --git a/CMakeLists.txt b/CMakeLists.txt index 7d52308..2ca2be4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -58,7 +58,7 @@ add_executable(sist2 src/auth0/auth0_c_api.h src/auth0/auth0_c_api.cpp - src/database/database_stats.c src/database/database_schema.c) + src/database/database_stats.c src/database/database_schema.c src/database/database_fts.c) set_target_properties(sist2 PROPERTIES LINKER_LANGUAGE C) target_link_directories(sist2 PRIVATE BEFORE ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib/) diff --git a/README.md b/README.md index 00fe53c..47de10e 100644 --- a/README.md +++ b/README.md @@ -185,7 +185,7 @@ docker run --rm --entrypoint cat my-sist2-image /root/sist2 > sist2-x64-linux 3. Install vcpkg dependencies ```bash - vcpkg install curl[core,openssl] sqlite3 cpp-jwt pcre cjson brotli libarchive[core,bzip2,libxml2,lz4,lzma,lzo] pthread tesseract libxml2 libmupdf gtest mongoose libmagic libraw gumbo ffmpeg[core,avcodec,avformat,swscale,swresample] + vcpkg install curl[core,openssl] sqlite3[core,fts5] cpp-jwt pcre cjson brotli libarchive[core,bzip2,libxml2,lz4,lzma,lzo] pthread tesseract libxml2 libmupdf gtest mongoose libmagic libraw gumbo ffmpeg[core,avcodec,avformat,swscale,swresample] ``` 4. Build diff --git a/src/cli.c b/src/cli.c index 9546a27..d56d7e6 100644 --- a/src/cli.c +++ b/src/cli.c @@ -410,6 +410,33 @@ int index_args_validate(index_args_t *args, int argc, const char **argv) { return 0; } +int sqlite_index_args_validate(sqlite_index_args_t *args, int argc, const char **argv) { + + LogCtx.verbose = 1; + + if (argc < 2) { + fprintf(stderr, "Required positional argument: PATH.\n"); + return 1; + } + + char *index_path = abspath(argv[1]); + if (index_path == NULL) { + LOG_FATALF("cli.c", "Invalid PATH argument. 
File not found: %s", argv[1]); + } else { + args->index_path = index_path; + } + + if (args->search_index_path == NULL) { + LOG_FATAL("cli.c", "Missing required argument --search-index"); + } + + LOG_DEBUGF("cli.c", "arg index_path=%s", args->index_path); + LOG_DEBUGF("cli.c", "arg search_index_path=%s", args->search_index_path); + LOG_DEBUGF("cli.c", "arg optimize_index=%d", args->optimize_database); + + return 0; +} + int web_args_validate(web_args_t *args, int argc, const char **argv) { LogCtx.verbose = 1; @@ -554,6 +581,11 @@ index_args_t *index_args_create() { return args; } +sqlite_index_args_t *sqlite_index_args_create() { + sqlite_index_args_t *args = calloc(sizeof(sqlite_index_args_t), 1); + return args; +} + web_args_t *web_args_create() { web_args_t *args = calloc(sizeof(web_args_t), 1); return args; diff --git a/src/cli.h b/src/cli.h index 10d48c3..d539b02 100644 --- a/src/cli.h +++ b/src/cli.h @@ -66,6 +66,12 @@ typedef struct index_args { int incremental; } index_args_t; +typedef struct { + char *index_path; + char *search_index_path; + int optimize_database; +} sqlite_index_args_t; + typedef struct web_args { char *es_url; char *es_index; @@ -102,6 +108,8 @@ typedef struct exec_args { index_args_t *index_args_create(); +sqlite_index_args_t *sqlite_index_args_create(); + void index_args_destroy(index_args_t *args); web_args_t *web_args_create(); @@ -110,6 +118,8 @@ void web_args_destroy(web_args_t *args); int index_args_validate(index_args_t *args, int argc, const char **argv); +int sqlite_index_args_validate(sqlite_index_args_t *args, int argc, const char **argv); + int web_args_validate(web_args_t *args, int argc, const char **argv); exec_args_t *exec_args_create(); @@ -118,4 +128,5 @@ void exec_args_destroy(exec_args_t *args); int exec_args_validate(exec_args_t *args, int argc, const char **argv); + #endif diff --git a/src/database/database.c b/src/database/database.c index 1800038..f878f20 100644 --- a/src/database/database.c +++ b/src/database/database.c @@ -74,6 +74,8 @@ void database_initialize(database_t *db) { CRASH_IF_NOT_SQLITE_OK(sqlite3_exec(db->db, IndexDatabaseSchema, NULL, NULL, NULL)); } else if (db->type == IPC_CONSUMER_DATABASE || db->type == IPC_PRODUCER_DATABASE) { CRASH_IF_NOT_SQLITE_OK(sqlite3_exec(db->db, IpcDatabaseSchema, NULL, NULL, NULL)); + } else if (db->type == FTS_DATABASE) { + CRASH_IF_NOT_SQLITE_OK(sqlite3_exec(db->db, FtsDatabaseSchema, NULL, NULL, NULL)); } sqlite3_close(db->db); @@ -479,28 +481,6 @@ void database_write_thumbnail(database_t *db, const char *id, int num, void *dat } -//void database_create_fts_index(database_t *db, database_t *fts_db) { -// // In a separate file, -// -// // use database_initialize() to create FTS schema -// // if --force-reset, then truncate the tables first -// -// /* -// * create/append fts table -// * -// * create/append scalar index table with -// * id,index,size,mtime,mime -// * -// * create/append path index table with -// * index,path,depth -// * -// * content table is a view with SELECT UNION for all attached tables -// * random_seed column -// */ -// -// // INSERT INTO ft(ft) VALUES('optimize'); -//} - job_t *database_get_work(database_t *db, job_type_t job_type) { job_t *job; diff --git a/src/database/database.h b/src/database/database.h index feef5c0..f000cd7 100644 --- a/src/database/database.h +++ b/src/database/database.h @@ -10,6 +10,7 @@ typedef struct index_descriptor index_descriptor_t; extern const char *IpcDatabaseSchema; extern const char *IndexDatabaseSchema; +extern const char 
*FtsDatabaseSchema; typedef enum { INDEX_DATABASE, @@ -86,8 +87,6 @@ typedef struct { long size; } treemap_row_t; -static treemap_row_t null_treemap_row = {0, 0, 0}; - database_t *database_create(const char *filename, database_type_t type); @@ -116,7 +115,7 @@ cJSON *database_document_iter(database_iterator_t *); database_iterator_t *database_create_delete_list_iterator(database_t *db); -char * database_delete_list_iter(database_iterator_t *iter); +char *database_delete_list_iter(database_iterator_t *iter); #define database_delete_list_iter_foreach(element, iter) \ for (char *(element) = database_delete_list_iter(iter); (element) != NULL; (element) = database_delete_list_iter(iter)) @@ -160,8 +159,14 @@ cJSON *database_get_stats(database_t *db, database_stat_type_d type); #define CRASH_IF_NOT_SQLITE_OK(x) do { \ int return_value = x; \ if (return_value != SQLITE_OK) { \ - LOG_FATALF("database.c", "Sqlite error @ database.c:%d : (%d) %s", __LINE__, return_value, sqlite3_errmsg(db->db)); \ + LOG_FATALF("database.c", "Sqlite error @ %s:%d : (%d) %s", __BASE_FILE__, __LINE__, return_value, sqlite3_errmsg(db->db)); \ } \ } while (0) +void database_fts_attach(database_t *db, const char *fts_database_path); + +void database_fts_index(database_t *db); + +void database_fts_optimize(database_t *db); + #endif //SIST2_DATABASE_H \ No newline at end of file diff --git a/src/database/database_fts.c b/src/database/database_fts.c new file mode 100644 index 0000000..1791ec7 --- /dev/null +++ b/src/database/database_fts.c @@ -0,0 +1,88 @@ +#include "database.h" +#include "src/ctx.h" + +void database_fts_attach(database_t *db, const char *fts_database_path) { + + LOG_DEBUGF("database_fts.c", "Attaching to %s", fts_database_path); + + sqlite3_stmt *stmt; + CRASH_IF_NOT_SQLITE_OK(sqlite3_prepare_v2( + db->db, "ATTACH DATABASE ? 
AS fts" + "", -1, &stmt, NULL)); + + sqlite3_bind_text(stmt, 1, fts_database_path, -1, SQLITE_STATIC); + + CRASH_IF_STMT_FAIL(sqlite3_step(stmt)); + sqlite3_finalize(stmt); +} + +void database_fts_index(database_t *db) { + + LOG_INFO("database_fts.c", "Creating content table."); + + CRASH_IF_NOT_SQLITE_OK(sqlite3_exec( + db->db, + "WITH docs AS (SELECT document.id as id,\n" + " (SELECT id FROM descriptor) as index_id,\n" + " size,\n" + " document.json_data ->> 'path' as path,\n" + " length(document.json_data->>'path') - length(REPLACE(document.json_data->>'path', '/', '')) as path_depth,\n" + " document.json_data ->> 'mime' as mime,\n" + " mtime,\n" + " CASE\n" + " WHEN sc.json_data IS NULL THEN CASE\n" + " WHEN t.tag IS NULL THEN json_set(\n" + " document.json_data, '$._id',\n" + " document.id, '$.size',\n" + " document.size, '$.mtime',\n" + " document.mtime)\n" + " ELSE json_set(document.json_data, '$._id',\n" + " document.id, '$.size',\n" + " document.size, '$.mtime',\n" + " document.mtime, '$.tag',\n" + " json_group_array(t.tag)) END\n" + " ELSE CASE\n" + " WHEN t.tag IS NULL THEN json_patch(\n" + " json_set(document.json_data, '$._id', document.id, '$.size',\n" + " document.size, '$.mtime', document.mtime),\n" + " sc.json_data)\n" + " ELSE json_set(json_patch(document.json_data, sc.json_data), '$._id',\n" + " document.id, '$.size', document.size, '$.mtime',\n" + " document.mtime, '$.tag',\n" + " json_group_array(t.tag)) END END as json_data\n" + " FROM document\n" + " LEFT JOIN document_sidecar sc ON document.id = sc.id\n" + " LEFT JOIN tag t ON document.id = t.id\n" + " GROUP BY document.id)\n" + "INSERT\n" + "INTO fts.document_index (id, index_id, size, path, path_depth, mtime, mime, json_data)\n" + "SELECT *\n" + "FROM docs\n" + "WHERE true\n" + "on conflict (id, index_id) do update set size=excluded.size,\n" + " mtime=excluded.mtime,\n" + " json_data=excluded.json_data;", + NULL, NULL, NULL)); + + CRASH_IF_NOT_SQLITE_OK(sqlite3_exec( + db->db, + "DELETE\n" + "FROM fts.document_index\n" + "WHERE id IN (SELECT id FROM delete_list)\n" + " AND index_id = (SELECT id FROM descriptor);", + NULL, NULL, NULL + )); +} + +void database_fts_optimize(database_t *db) { + LOG_INFO("database_fts.c", "Optimizing search index."); + + CRASH_IF_NOT_SQLITE_OK(sqlite3_exec( + db->db, + "INSERT INTO search(search) VALUES('optimize');", + NULL, NULL, NULL)); + LOG_DEBUG("database_fts.c", "Optimized fts5 table."); + + CRASH_IF_NOT_SQLITE_OK(sqlite3_exec(db->db, "PRAGMA fts.optimize;", NULL, NULL, NULL)); + LOG_DEBUG("database_fts.c", "optimized indices."); +} diff --git a/src/database/database_schema.c b/src/database/database_schema.c index 23cb05f..a55ccd9 100644 --- a/src/database/database_schema.c +++ b/src/database/database_schema.c @@ -1,3 +1,45 @@ +const char *FtsDatabaseSchema = + "CREATE TABLE IF NOT EXISTS document_index (" + " id TEXT NOT NULL," + " index_id TEXT NOT NULL," + " size INTEGER NOT NULL," + " path TEXT NOT NULL," + " path_depth INT NOT NULL," + " mtime INTEGER NOT NULL," + " mime TEXT NOT NULL," + " json_data TEXT NOT NULL," + " PRIMARY KEY (id, index_id)" + ");" + "" + "CREATE VIEW IF NOT EXISTS document_view (rowid, name, content)" + " AS" + " SELECT rowid," + " json_data->>'name'," + " json_data->>'content'" + " FROM document_index;" + "" + "CREATE INDEX IF NOT EXISTS document_index_size_idx ON document_index (size);" + "CREATE INDEX IF NOT EXISTS document_index_mtime_idx ON document_index (mtime);" + "CREATE INDEX IF NOT EXISTS document_index_mime_idx ON document_index (mime);" 
+ "CREATE INDEX IF NOT EXISTS document_index_path_idx ON document_index (path);" + "CREATE INDEX IF NOT EXISTS document_index_path_depth_idx ON document_index (path_depth);" + "" + "CREATE VIRTUAL TABLE IF NOT EXISTS search USING fts5 (" + " name," + " content," + " content='document_view'" + ");" + "" + "CREATE TRIGGER IF NOT EXISTS on_insert AFTER INSERT ON document_index BEGIN" + " INSERT INTO search(rowid, name, content) VALUES (new.rowid, new.json_data->>'name', new.json_data->>'content');" + "END;" + "CREATE TRIGGER IF NOT EXISTS on_delete AFTER DELETE ON document_index BEGIN" + " INSERT INTO search(search, name, content) VALUES('delete', old.json_data->>'name', old.json_data->>'content');" + "END;" + "CREATE TRIGGER IF NOT EXISTS on_update AFTER UPDATE ON document_index BEGIN" + " INSERT INTO search(search, rowid, name, content) VALUES('delete', old.rowid, old.json_data->>'name', old.json_data->>'content');" + " INSERT INTO search(rowid, name, content) VALUES (new.rowid, new.json_data->>'name', new.json_data->>'content');" + "END;"; const char *IpcDatabaseSchema = "CREATE TABLE parse_job (" diff --git a/src/main.c b/src/main.c index 0d15d11..433483b 100644 --- a/src/main.c +++ b/src/main.c @@ -22,6 +22,7 @@ static const char *const usage[] = { "sist2 scan [OPTION]... PATH", "sist2 index [OPTION]... INDEX", + "sist2 sqlite-index [OPTION]... INDEX", "sist2 web [OPTION]... INDEX...", "sist2 exec-script [OPTION]... INDEX", NULL, @@ -351,6 +352,23 @@ void sist2_index(index_args_t *args) { free(desc); } +void sist2_sqlite_index(sqlite_index_args_t *args) { + database_t *db = database_create(args->index_path, INDEX_DATABASE); + database_open(db); + + database_t *search_db = database_create(args->search_index_path, FTS_DATABASE); + database_initialize(search_db); + + database_fts_attach(db, args->search_index_path); + + database_fts_index(db); + if (args->optimize_database) { + database_fts_optimize(db); + } + + database_close(db, FALSE); +} + void sist2_exec_script(exec_args_t *args) { LogCtx.verbose = TRUE; @@ -436,6 +454,7 @@ int main(int argc, const char *argv[]) { index_args_t *index_args = index_args_create(); web_args_t *web_args = web_args_create(); exec_args_t *exec_args = exec_args_create(); + sqlite_index_args_t *sqlite_index_args = sqlite_index_args_create(); int arg_version = 0; @@ -445,6 +464,7 @@ int main(int argc, const char *argv[]) { char *common_script_path = NULL; int common_async_script = 0; int common_threads = 0; + int common_optimize_database = 0; struct argparse_option options[] = { OPT_HELP(), @@ -471,7 +491,7 @@ int main(int argc, const char *argv[]) { OPT_STRING('o', "output", &scan_args->output, "Output index file path. DEFAULT: index.sist2"), OPT_BOOLEAN(0, "incremental", &scan_args->incremental, "If the output file path exists, only scan new or modified files."), - OPT_BOOLEAN(0, "optimize-index", &scan_args->optimize_database, + OPT_BOOLEAN(0, "optimize-index", &common_optimize_database, "Defragment index file after scan to reduce its file size."), OPT_STRING(0, "rewrite-url", &scan_args->rewrite_url, "Serve files from this url instead of from disk."), OPT_STRING(0, "name", &scan_args->name, "Index display name. DEFAULT: index"), @@ -520,6 +540,11 @@ int main(int argc, const char *argv[]) { OPT_INTEGER(0, "batch-size", &index_args->batch_size, "Index batch size. 
DEFAULT: 70"), OPT_BOOLEAN('f', "force-reset", &index_args->force_reset, "Reset Elasticsearch mappings and settings."), + OPT_GROUP("sqlite-index options"), + OPT_STRING(0, "search-index", &sqlite_index_args->search_index_path, "Path to search index. Will be created if it does not exist yet."), + OPT_BOOLEAN(0, "optimize-index", &common_optimize_database, + "Optimize search index file for smaller size and faster queries."), + OPT_GROUP("Web options"), OPT_STRING(0, "es-url", &common_es_url, "Elasticsearch url. DEFAULT: http://localhost:9200"), OPT_BOOLEAN(0, "es-insecure-ssl", &common_es_insecure_ssl, @@ -586,6 +611,9 @@ int main(int argc, const char *argv[]) { exec_args->async_script = common_async_script; index_args->async_script = common_async_script; + scan_args->optimize_database = common_optimize_database; + sqlite_index_args->optimize_database = common_optimize_database; + if (argc == 0) { argparse_usage(&argparse); goto end; @@ -605,6 +633,14 @@ int main(int argc, const char *argv[]) { } sist2_index(index_args); + } else if (strcmp(argv[0], "sqlite-index") == 0) { + + int err = sqlite_index_args_validate(sqlite_index_args, argc, argv); + if (err != 0) { + goto end; + } + sist2_sqlite_index(sqlite_index_args); + } else if (strcmp(argv[0], "web") == 0) { int err = web_args_validate(web_args, argc, argv); From 944c224904eef8fdcf2174b8cce31e7af4a551cd Mon Sep 17 00:00:00 2001 From: simon987 Date: Thu, 18 May 2023 14:16:11 -0400 Subject: [PATCH 2/3] SQLite search backend --- .gitignore | 4 +- CMakeLists.txt | 10 +- README.md | 41 +- docs/USAGE.md | 54 +- scripts/sqlite_extension.c | 84 ++ scripts/sqlite_extension_compile.sh | 1 + sist2-vue/src/App.vue | 9 + sist2-vue/src/Sist2Api.ts | 410 ++++++++- ...st2Query.ts => Sist2ElasticsearchQuery.ts} | 5 +- sist2-vue/src/Sist2SqliteQuery.ts | 111 +++ sist2-vue/src/components/DebugInfo.vue | 69 +- sist2-vue/src/components/DocCard.vue | 172 ++-- sist2-vue/src/components/DocFileTitle.vue | 2 +- sist2-vue/src/components/DocListItem.vue | 2 +- sist2-vue/src/components/FullThumbnail.vue | 2 +- sist2-vue/src/components/LazyContentDiv.vue | 65 +- sist2-vue/src/components/MimePicker.vue | 2 +- sist2-vue/src/components/NavBar.vue | 2 +- sist2-vue/src/components/PathTree.vue | 334 ++++--- sist2-vue/src/components/ResultsCard.vue | 199 +++-- sist2-vue/src/components/SearchBar.vue | 2 +- sist2-vue/src/components/TagContainer.vue | 470 +++++----- sist2-vue/src/components/TagPicker.vue | 318 +++---- sist2-vue/src/i18n/messages.ts | 6 +- sist2-vue/src/store/index.ts | 21 +- sist2-vue/src/views/Configuration.vue | 18 +- sist2-vue/src/views/FilePage.vue | 4 +- sist2-vue/src/views/SearchPage.vue | 42 +- sist2-vue/src/views/StatsPage.vue | 96 +-- src/cli.c | 27 +- src/cli.h | 8 +- src/ctx.h | 2 + src/database/database.c | 178 +++- src/database/database.h | 67 +- src/database/database_fts.c | 812 ++++++++++++++++-- src/database/database_schema.c | 83 +- src/database/database_stats.c | 2 + src/index/elastic.c | 6 +- src/main.c | 37 +- src/sist.h | 4 +- src/util.h | 3 + src/web/serve.c | 344 ++++---- src/web/serve.h | 5 + src/web/web_fts.c | 378 ++++++++ src/web/web_util.c | 35 + src/web/web_util.h | 11 +- 46 files changed, 3261 insertions(+), 1296 deletions(-) create mode 100644 scripts/sqlite_extension.c create mode 100644 scripts/sqlite_extension_compile.sh rename sist2-vue/src/{Sist2Query.ts => Sist2ElasticsearchQuery.ts} (98%) create mode 100644 sist2-vue/src/Sist2SqliteQuery.ts create mode 100644 src/web/web_fts.c diff --git a/.gitignore b/.gitignore index 
0ca141d..8281d97 100644 --- a/.gitignore +++ b/.gitignore @@ -43,4 +43,6 @@ src/magic_generated.c src/index/static_generated.c *.sist2 *-shm -*-journal \ No newline at end of file +*-journal +.vscode +*.fts \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index 2ca2be4..6482f23 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -31,7 +31,8 @@ add_subdirectory(third-party/libscan) set(ARGPARSE_SHARED off) add_subdirectory(third-party/argparse) -add_executable(sist2 +add_executable( + sist2 # argparse third-party/argparse/argparse.h third-party/argparse/argparse.c @@ -58,7 +59,11 @@ add_executable(sist2 src/auth0/auth0_c_api.h src/auth0/auth0_c_api.cpp - src/database/database_stats.c src/database/database_schema.c src/database/database_fts.c) + src/database/database_stats.c + src/database/database_schema.c + src/database/database_fts.c + src/web/web_fts.c +) set_target_properties(sist2 PROPERTIES LINKER_LANGUAGE C) target_link_directories(sist2 PRIVATE BEFORE ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib/) @@ -126,6 +131,7 @@ else () PRIVATE -Ofast +# -g -fno-stack-protector -fomit-frame-pointer -w diff --git a/README.md b/README.md index 47de10e..4eafb08 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ services: - "discovery.type=single-node" - "ES_JAVA_OPTS=-Xms2g -Xmx2g" sist2-admin: - image: simon987/sist2:3.0.4-x64-linux + image: simon987/sist2:3.0.7-x64-linux restart: unless-stopped volumes: - ./sist2-admin-data/:/sist2-admin/ @@ -62,12 +62,14 @@ Navigate to http://localhost:8080/ to configure sist2-admin. ### Using the executable file *(Linux/WSL only)* -1. Have an Elasticsearch (>= 6.8.X, ideally >=7.14.0) instance running - 1. Download [from official website](https://www.elastic.co/downloads/elasticsearch) - 2. *(or)* Run using docker: - ```bash - docker run -d -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.17.9 - ``` +1. Choose search backend (See [comparison](#search-backends)): + * **Elasticsearch**: have an Elasticsearch (version >= 6.8.X, ideally >=7.14.0) instance running + 1. Download [from official website](https://www.elastic.co/downloads/elasticsearch) + 2. *(or)* Run using docker: + ```bash + docker run -d -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.17.9 + ``` + * **SQLite**: No installation required 2. Download the [latest sist2 release](https://github.com/simon987/sist2/releases). Select the file corresponding to your CPU architecture and mark the binary as executable with `chmod +x`. @@ -76,7 +78,9 @@ Navigate to http://localhost:8080/ to configure sist2-admin. Example usage: 1. Scan a directory: `sist2 scan ~/Documents --output ./documents.sist2` -2. Push index to Elasticsearch: `sist2 index ./documents.sist2` +2. Prepare search index: + * **Elasticsearch**: `sist2 index --es-url http://localhost:9200 ./documents.sist2` + * **SQLite**: `sist2 index --search-index ./search.sist2 ./documents.sist2` 3. Start web interface: `sist2 web ./documents.sist2` ## Format support @@ -136,9 +140,27 @@ sist2 scan --ocr-images --ocr-lang eng ~/Images/Screenshots/ sist2 scan --ocr-ebooks --ocr-images --ocr-lang eng+chi_sim ~/Chinese-Bilingual/ ``` +### Search backends + +sist2 v3.0.7+ supports SQLite search backend. The SQLite search backend has +fewer features and generally comparable query performance for medium-size +indices, but it uses much less memory and is easier to set up. 
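For a concrete sense of the fts5 query syntax referenced in the comparison table below, here is a minimal sketch of the kind of statement the SQLite backend can serve, written against the `search` virtual table and `document_index` table that this patch defines in `FtsDatabaseSchema`. The specific column filters, the join back to `document_index`, and the `LIMIT` are illustrative assumptions; the actual SQL generated by `web_fts.c` is not reproduced here and may differ.

```sql
-- Sketch only: find documents whose name mentions "report" and whose content
-- mentions "budget", using the external-content fts5 table from this patch.
SELECT di.id,
       di.json_data ->> 'name'                    AS name,
       snippet(search, 1, '<b>', '</b>', '...', 16) AS content_highlight
FROM search
         JOIN document_index di ON di.rowid = search.rowid
WHERE search MATCH 'name:report AND content:budget'  -- fts5 query syntax
ORDER BY rank                                        -- fts5 relevance ranking
LIMIT 60;                                            -- page size is an assumption
```
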
+ +| | SQLite | Elasticsearch | +|----------------------------------------------|:----------------------------------------:|:-------------------------------------------------------------------------------------------------------------------------------------:| +| Requires separate search engine installation | | ✓ | +| Memory footprint | ~20MB | >500MB | +| Query syntax | [fts5](https://www.sqlite.org/fts5.html) | [query_string](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#query-string-syntax) | +| Fuzzy search | | ✓ | +| Media Types tree real-time updating | | ✓ | +| Search in file `path` | | ✓ | +| Manual tagging | ✓ | ✓ | +| User scripts | | ✓ | +| Media Type breakdown for search results | | ✓ | + ### NER -sist2 v3.0.4+ supports named-entity recognition (NER). Simply add a supported repository URL to +sist2 v3.0.4+ supports named-entity recognition (NER). Simply add a supported repository URL to **Configuration** > **Machine learning options** > **Model repositories** to enable it. @@ -151,7 +173,6 @@ See [simon987/sist2-ner-models](https://github.com/simon987/sist2-ner-models) fo |---------------------------------------------------------------------------------------------------------|-----------------------------------------|---------| | [simon987/sist2-ner-models](https://raw.githubusercontent.com/simon987/sist2-ner-models/main/repo.json) | [simon987](https://github.com/simon987) | General | -
Screenshot diff --git a/docs/USAGE.md b/docs/USAGE.md index 75ae1b7..43be817 100644 --- a/docs/USAGE.md +++ b/docs/USAGE.md @@ -3,6 +3,7 @@ ``` Usage: sist2 scan [OPTION]... PATH or: sist2 index [OPTION]... INDEX + or: sist2 sqlite-index [OPTION]... INDEX or: sist2 web [OPTION]... INDEX... or: sist2 exec-script [OPTION]... INDEX @@ -54,9 +55,13 @@ Index options --batch-size= Index batch size. DEFAULT: 70 -f, --force-reset Reset Elasticsearch mappings and settings. +sqlite-index options + --search-index= Path to search index. Will be created if it does not exist yet. + Web options --es-url= Elasticsearch url. DEFAULT: http://localhost:9200 --es-insecure-ssl Do not verify SSL connections to Elasticsearch. + --search-index= Path to SQLite search index. --es-index= Elasticsearch index name. DEFAULT: sist2 --bind= Listen for connections on this address. DEFAULT: localhost:4090 --auth= Basic auth in user:password format @@ -111,53 +116,54 @@ sist scan ~/Documents -o ./documents.sist2 --incremental sist scan ~/Documents -o ./documents.sist2 --incremental ``` -### Index examples +### Index documents to Elasticsearch search backend -**Push to elasticsearch** ```bash -sist2 index --force-reset --batch-size 1000 --es-url http://localhost:9200 ./my_index/ -sist2 index ./my_index/ +sist2 index --force-reset --batch-size 1000 --es-url http://localhost:9200 ./my_index.sist2 +sist2 index ./my_index.sist2 +``` + +#### Index documents to SQLite search backend +```bash +# The search index will be created if it does not exist already +sist2 sqlite-index ./index1.sist2 --search-index search.sist2 +sist2 sqlite-index ./index2.sist2 --search-index search.sist2 ``` **Save index in JSON format** ```bash -sist2 index --print ./my_index/ > my_index.ndjson +sist2 index --print ./my_index.sist2 > my_index.ndjson ``` **Inspect contents of an index** ```bash -sist2 index --print ./my_index/ | jq | less +sist2 index --print ./my_index.sist2 | jq | less ``` ## Web -### Web options - * `--es-url=` Elasticsearch url. - * `--es-index` - Elasticsearch index name. DEFAULT=sist2 - * `--bind=` Listen on this address. - * `--auth=` Basic auth in user:password format - * `--tag-auth=` Basic auth in user:password format. Works the same way as the - `--auth` argument, but authentication is only applied the `/tag/` endpoint. - * `--tagline=` When specified, will replace the default tagline in the navbar. - * `--dev` Serve html & js files from disk (for development, used to modify frontend files without having to recompile) - * `--lang=` Set the default web UI language (See #180 for a list of supported languages, default - is `en`). 
The user can change the language in the configuration page - * `--auth0-audience`, `--auth0-domain`, `--auth0-client-id`, `--auth0-public-key-file` See [Authentication with Auth0](auth0.md) - ### Web examples -**Single index** +**Single index (Elasticsearch backend)** ```bash -sist2 web --auth admin:hunter2 --bind 0.0.0.0:8888 my_index +sist2 web --auth admin:hunter2 --bind 0.0.0.0:8888 my_index.sist2 ``` -**Multiple indices** +**Multiple indices (Elasticsearch backend)** ```bash # Indices will be displayed in this order in the web interface -sist2 web index1 index2 index3 index4 +sist2 web index1.sist2 index2.sist2 index3.sist2 index4.sist2 ``` +**SQLite search backend** +```bash +sist2 web --search-index search.sist2 index1.sist2 +``` + +#### Auth0 authentication + +See [auth0.md](auth0.md) + ### rewrite_url When the `rewrite_url` field is not empty, the web module ignores the `root` diff --git a/scripts/sqlite_extension.c b/scripts/sqlite_extension.c new file mode 100644 index 0000000..827b48f --- /dev/null +++ b/scripts/sqlite_extension.c @@ -0,0 +1,84 @@ +#include +#include +#include + +SQLITE_EXTENSION_INIT1 + +static int sep_rfind(const char *str) { + for (int i = (int) strlen(str); i >= 0; i--) { + if (str[i] == '/') { + return i; + } + } + return -1; +} + +void path_parent_func(sqlite3_context *ctx, int argc, sqlite3_value **argv) { + if (argc != 1 || sqlite3_value_type(argv[0]) != SQLITE_TEXT) { + sqlite3_result_error(ctx, "Invalid parameters", -1); + } + + const char *value = (const char *) sqlite3_value_text(argv[0]); + + int stop = sep_rfind(value); + if (stop == -1) { + sqlite3_result_null(ctx); + return; + } + char parent[4096 * 3]; + strncpy(parent, value, stop); + + sqlite3_result_text(ctx, parent, stop, SQLITE_TRANSIENT); +} + +void random_func(sqlite3_context *ctx, int argc, sqlite3_value **argv) { + if (argc != 1 || sqlite3_value_type(argv[0]) != SQLITE_INTEGER) { + sqlite3_result_error(ctx, "Invalid parameters", -1); + } + + char state_buf[32] = {0,}; + struct random_data buf; + int result; + + long seed = sqlite3_value_int64(argv[0]); + + initstate_r((int) seed, state_buf, sizeof(state_buf), &buf); + + random_r(&buf, &result); + + sqlite3_result_int(ctx, result); +} + + +int sqlite3_extension_init( + sqlite3 *db, + char **pzErrMsg, + const sqlite3_api_routines *pApi +) { + SQLITE_EXTENSION_INIT2(pApi); + + + sqlite3_create_function( + db, + "path_parent", + 1, + SQLITE_UTF8, + NULL, + path_parent_func, + NULL, + NULL + ); + + sqlite3_create_function( + db, + "random_seeded", + 1, + SQLITE_UTF8, + NULL, + random_func, + NULL, + NULL + ); + + return SQLITE_OK; +} \ No newline at end of file diff --git a/scripts/sqlite_extension_compile.sh b/scripts/sqlite_extension_compile.sh new file mode 100644 index 0000000..e3422f1 --- /dev/null +++ b/scripts/sqlite_extension_compile.sh @@ -0,0 +1 @@ +gcc -I/mnt/work/vcpkg/installed/x64-linux/include -g -fPIC -shared sqlite_extension.c -o sist2funcs.so \ No newline at end of file diff --git a/sist2-vue/src/App.vue b/sist2-vue/src/App.vue index 34ed80b..2d2bd3d 100644 --- a/sist2-vue/src/App.vue +++ b/sist2-vue/src/App.vue @@ -21,6 +21,8 @@ import {mapActions, mapGetters, mapMutations} from "vuex"; import Sist2Api from "@/Sist2Api"; import ModelsRepo from "@/ml/modelsRepo"; import {setupAuth0} from "@/main"; +import Sist2ElasticsearchQuery from "@/Sist2ElasticsearchQuery"; +import Sist2SqliteQuery from "@/Sist2SqliteQuery"; export default { components: {NavBar}, @@ -88,6 +90,13 @@ export default { this.setSist2Info(data); 
this.setIndices(data.indices) + + if (Sist2Api.backend() === "sqlite") { + Sist2Api.init(Sist2SqliteQuery.searchQuery); + this.$store.commit("setUiSqliteMode", true); + } else { + Sist2Api.init(Sist2ElasticsearchQuery.searchQuery); + } }); }, methods: { diff --git a/sist2-vue/src/Sist2Api.ts b/sist2-vue/src/Sist2Api.ts index 6722b5b..054b151 100644 --- a/sist2-vue/src/Sist2Api.ts +++ b/sist2-vue/src/Sist2Api.ts @@ -1,5 +1,7 @@ import axios from "axios"; import {ext, strUnescape, lum} from "./util"; +import Sist2Query from "@/Sist2ElasticsearchQuery"; +import store from "@/store"; export interface EsTag { id: string @@ -99,12 +101,22 @@ export interface EsResult { class Sist2Api { - private baseUrl: string + private readonly baseUrl: string + private sist2Info: any + private queryfunc: Function; constructor(baseUrl: string) { this.baseUrl = baseUrl; } + init(queryFunc: Function) { + this.queryfunc = queryFunc; + } + + backend() { + return this.sist2Info.searchBackend; + } + getSist2Info(): Promise { return axios.get(`${this.baseUrl}i`).then(resp => { const indices = resp.data.indices as Index[]; @@ -119,6 +131,8 @@ class Sist2Api { } as Index; }); + this.sist2Info = resp.data; + return resp.data; }) } @@ -219,6 +233,14 @@ class Sist2Api { } as Tag; } + search(): Promise { + if (this.backend() == "sqlite") { + return this.ftsQuery(this.queryfunc()) + } else { + return this.esQuery(this.queryfunc()); + } + } + esQuery(query: any): Promise { return axios.post(`${this.baseUrl}es`, query).then(resp => { const res = resp.data as EsResult; @@ -237,7 +259,30 @@ class Sist2Api { }); } - getMimeTypes(query = undefined) { + ftsQuery(query: any): Promise { + return axios.post(`${this.baseUrl}fts/search`, query).then(resp => { + const res = resp.data as any; + + if (res.hits.hits) { + res.hits.hits.forEach(hit => { + hit["_source"]["name"] = strUnescape(hit["_source"]["name"]); + hit["_source"]["path"] = strUnescape(hit["_source"]["path"]); + + this.setHitProps(hit); + this.setHitTags(hit); + + if ("highlight" in hit) { + hit["highlight"]["name"] = [hit["highlight"]["name"]]; + hit["highlight"]["content"] = [hit["highlight"]["content"]]; + } + }); + } + + return res; + }); + } + + private getMimeTypesEs(query) { const AGGS = { mimeTypes: { terms: { @@ -258,48 +303,70 @@ class Sist2Api { } return this.esQuery(query).then(resp => { - const mimeMap: any[] = []; - const buckets = resp["aggregations"]["mimeTypes"]["buckets"]; + return resp["aggregations"]["mimeTypes"]["buckets"].map(bucket => ({ + mime: bucket.key, + count: bucket.doc_count + })); - buckets.sort((a: any, b: any) => a.key > b.key).forEach((bucket: any) => { - const tmp = bucket["key"].split("/"); - const category = tmp[0]; - const mime = tmp[1]; + }); + } - let category_exists = false; + private getMimeTypesSqlite(): Promise<[{ mime: string, count: number }]> { + return axios.get(`${this.baseUrl}fts/mimetypes`) + .then(resp => { + return resp.data; + }); + } - const child = { - "id": bucket["key"], - "text": `${mime} (${bucket["doc_count"]})` - }; + async getMimeTypes(query = undefined) { + let buckets; - mimeMap.forEach(node => { - if (node.text === category) { - node.children.push(child); - category_exists = true; - } - }); + if (this.backend() == "sqlite") { + buckets = await this.getMimeTypesSqlite(); + } else { + buckets = await this.getMimeTypesEs(query); + } - if (!category_exists) { - mimeMap.push({text: category, children: [child], id: category}); - } - }) + const mimeMap: any[] = []; + + buckets.sort((a: any, b: any) => a.mime > 
b.mime).forEach((bucket: any) => { + const tmp = bucket.mime.split("/"); + const category = tmp[0]; + const mime = tmp[1]; + + let category_exists = false; + + const child = { + "id": bucket.mime, + "text": `${mime} (${bucket.count})` + }; mimeMap.forEach(node => { - if (node.children) { - node.children.sort((a, b) => a.id.localeCompare(b.id)); + if (node.text === category) { + node.children.push(child); + category_exists = true; } - }) - mimeMap.sort((a, b) => a.id.localeCompare(b.id)) + }); - return {buckets, mimeMap}; - }); + if (!category_exists) { + mimeMap.push({text: category, children: [child], id: category}); + } + }) + + mimeMap.forEach(node => { + if (node.children) { + node.children.sort((a, b) => a.id.localeCompare(b.id)); + } + }) + mimeMap.sort((a, b) => a.id.localeCompare(b.id)) + + return {buckets, mimeMap}; } _createEsTag(tag: string, count: number): EsTag { const tokens = tag.split("."); - if (/.*\.#[0-9a-f]{6}/.test(tag)) { + if (/.*\.#[0-9a-fA-F]{6}/.test(tag)) { return { id: tokens.slice(0, -1).join("."), color: tokens.pop(), @@ -316,32 +383,48 @@ class Sist2Api { }; } - getTags() { + private getTagsEs() { return this.esQuery({ aggs: { tags: { terms: { field: "tag", - size: 10000 + size: 65535 } } }, size: 0, }).then(resp => { - const seen = new Set(); - - const tags = resp["aggregations"]["tags"]["buckets"] + return resp["aggregations"]["tags"]["buckets"] .sort((a: any, b: any) => a["key"].localeCompare(b["key"])) .map((bucket: any) => this._createEsTag(bucket["key"], bucket["doc_count"])); + }); + } - // Remove duplicates (same tag with different color) - return tags.filter((t: EsTag) => { - if (seen.has(t.id)) { - return false; - } - seen.add(t.id); - return true; + private getTagsSqlite() { + return axios.get(`${this.baseUrl}/fts/tags`) + .then(resp => { + return resp.data.map(tag => this._createEsTag(tag.tag, tag.count)) }); + } + + async getTags(): Promise { + let tags; + if (this.backend() == "sqlite") { + tags = await this.getTagsSqlite(); + } else { + tags = await this.getTagsEs(); + } + + // Remove duplicates (same tag with different color) + const seen = new Set(); + + return tags.filter((t: EsTag) => { + if (seen.has(t.id)) { + return false; + } + seen.add(t.id); + return true; }); } @@ -361,6 +444,144 @@ class Sist2Api { }); } + searchPaths(indexId, minDepth, maxDepth, prefix = null) { + if (this.backend() == "sqlite") { + return this.searchPathsSqlite(indexId, minDepth, minDepth, prefix); + } else { + return this.searchPathsEs(indexId, minDepth, maxDepth, prefix); + } + } + + private searchPathsSqlite(indexId, minDepth, maxDepth, prefix) { + return axios.post(`${this.baseUrl}fts/paths`, { + indexId, minDepth, maxDepth, prefix + }).then(resp => { + return resp.data; + }); + } + + private searchPathsEs(indexId, minDepth, maxDepth, prefix): Promise<[{ path: string, count: number }]> { + + const query = { + query: { + bool: { + filter: [ + {term: {index: indexId}}, + {range: {_depth: {gte: minDepth, lte: maxDepth}}}, + ] + } + }, + aggs: { + paths: { + terms: { + field: "path", + size: 10000 + } + } + }, + size: 0 + }; + + if (prefix != null) { + query["query"]["bool"]["must"] = { + prefix: { + path: prefix, + } + }; + } + + return this.esQuery(query).then(resp => { + const buckets = resp["aggregations"]["paths"]["buckets"]; + + if (!buckets) { + return []; + } + + return buckets + .map(bucket => ({ + path: bucket.key, + count: bucket.doc_count + })); + }); + } + + private getDateRangeSqlite() { + return axios.get(`${this.baseUrl}fts/dateRange`) + .then(resp 
=> ({ + min: resp.data.dateMin, + max: resp.data.dateMax, + })); + } + + getDateRange(): Promise<{ min: number, max: number }> { + if (this.backend() == "sqlite") { + return this.getDateRangeSqlite(); + } else { + return this.getDateRangeEs(); + } + } + + private getDateRangeEs() { + return this.esQuery({ + // TODO: filter current selected indices + aggs: { + dateMin: {min: {field: "mtime"}}, + dateMax: {max: {field: "mtime"}}, + }, + size: 0 + }).then(res => { + const range = { + min: res.aggregations.dateMin.value, + max: res.aggregations.dateMax.value, + } + + if (range.min == null) { + range.min = 0; + range.max = 1; + } else if (range.min == range.max) { + range.max += 1; + } + + return range; + }); + } + + private getPathSuggestionsSqlite(text: string) { + return axios.post(`${this.baseUrl}fts/paths`, { + prefix: text, + minDepth: 1, + maxDepth: 10000 + }).then(resp => { + return resp.data.map(bucket => bucket.path); + }) + } + + private getPathSuggestionsEs(text) { + return this.esQuery({ + suggest: { + path: { + prefix: text, + completion: { + field: "suggest-path", + skip_duplicates: true, + size: 10000 + } + } + } + }).then(resp => { + return resp["suggest"]["path"][0]["options"] + .map(opt => opt["_source"]["path"]); + }); + } + + getPathSuggestions(text: string): Promise { + if (this.backend() == "sqlite") { + return this.getPathSuggestionsSqlite(text); + } else { + return this.getPathSuggestionsEs(text) + } + } + getTreemapStat(indexId: string) { return `${this.baseUrl}s/${indexId}/TMAP`; } @@ -376,6 +597,111 @@ class Sist2Api { getDateStat(indexId: string) { return `${this.baseUrl}s/${indexId}/DAGG`; } + + private getDocumentEs(docId: string, highlight: boolean, fuzzy: boolean) { + const query = Sist2Query.searchQuery(); + + if (highlight) { + const fields = fuzzy + ? 
{"content.nGram": {}} + : {content: {}}; + + query.highlight = { + pre_tags: [""], + post_tags: [""], + number_of_fragments: 0, + fields, + }; + + if (!store.state.sist2Info.esVersionLegacy) { + query.highlight.max_analyzed_offset = 999_999; + } + } + + if ("function_score" in query.query) { + query.query = query.query.function_score.query; + } + + if (!("must" in query.query.bool)) { + query.query.bool.must = []; + } else if (!Array.isArray(query.query.bool.must)) { + query.query.bool.must = [query.query.bool.must]; + } + + query.query.bool.must.push({match: {_id: docId}}); + + delete query["sort"]; + delete query["aggs"]; + delete query["search_after"]; + delete query.query["function_score"]; + + query._source = { + includes: ["content", "name", "path", "extension"] + } + + query.size = 1; + + return this.esQuery(query).then(resp => { + if (resp.hits.hits.length === 1) { + return resp.hits.hits[0]; + } + return null; + }); + } + + private getDocumentSqlite(docId: string): Promise { + return axios.get(`${this.baseUrl}/fts/d/${docId}`) + .then(resp => ({ + _source: resp.data + } as EsHit)); + } + + getDocument(docId: string, highlight: boolean, fuzzy: boolean): Promise { + if (this.backend() == "sqlite") { + return this.getDocumentSqlite(docId); + } else { + return this.getDocumentEs(docId, highlight, fuzzy); + } + } + + getTagSuggestions(prefix: string): Promise { + if (this.backend() == "sqlite") { + return this.getTagSuggestionsSqlite(prefix); + } else { + return this.getTagSuggestionsEs(prefix); + } + } + + private getTagSuggestionsSqlite(prefix): Promise { + return axios.post(`${this.baseUrl}/fts/suggestTags`, prefix) + .then(resp => (resp.data)); + } + + private getTagSuggestionsEs(prefix): Promise { + return this.esQuery({ + suggest: { + tag: { + prefix: prefix, + completion: { + field: "suggest-tag", + skip_duplicates: true, + size: 10000 + } + } + } + }).then(resp => { + const result = []; + resp["suggest"]["tag"][0]["options"].map(opt => opt["_source"]["tag"]).forEach(tags => { + tags.forEach(tag => { + const t = tag.slice(0, -8); + if (!result.find(x => x.slice(0, -8) === t)) { + result.push(tag); + } + }); + }); + return result; + }); + } } export default new Sist2Api(""); \ No newline at end of file diff --git a/sist2-vue/src/Sist2Query.ts b/sist2-vue/src/Sist2ElasticsearchQuery.ts similarity index 98% rename from sist2-vue/src/Sist2Query.ts rename to sist2-vue/src/Sist2ElasticsearchQuery.ts index c25c375..dab0231 100644 --- a/sist2-vue/src/Sist2Query.ts +++ b/sist2-vue/src/Sist2ElasticsearchQuery.ts @@ -67,7 +67,7 @@ interface SortMode { } -class Sist2Query { +class Sist2ElasticsearchQuery { searchQuery(blankSearch: boolean = false): any { @@ -249,4 +249,5 @@ class Sist2Query { } } -export default new Sist2Query(); \ No newline at end of file + +export default new Sist2ElasticsearchQuery(); \ No newline at end of file diff --git a/sist2-vue/src/Sist2SqliteQuery.ts b/sist2-vue/src/Sist2SqliteQuery.ts new file mode 100644 index 0000000..395ff15 --- /dev/null +++ b/sist2-vue/src/Sist2SqliteQuery.ts @@ -0,0 +1,111 @@ +import store from "./store"; +import {EsHit, Index} from "@/Sist2Api"; + +const SORT_MODES = { + score: { + "sort": "score", + }, + random: { + "sort": "random" + }, + dateAsc: { + "sort": "mtime" + }, + dateDesc: { + "sort": "mtime", + "sortAsc": false + }, + sizeAsc: { + "sort": "size", + }, + sizeDesc: { + "sort": "size", + "sortAsc": false + }, + nameAsc: { + "sort": "name", + }, + nameDesc: { + "sort": "name", + "sortAsc": false + } +} as any; + +interface 
SortMode { + text: string + mode: any[] + key: (hit: EsHit) => any +} + + +class Sist2ElasticsearchQuery { + + searchQuery(): any { + + const getters = store.getters; + + const searchText = getters.searchText; + const pathText = getters.pathText; + const sizeMin = getters.sizeMin; + const sizeMax = getters.sizeMax; + const dateMin = getters.dateMin; + const dateMax = getters.dateMax; + const size = getters.size; + const after = getters.lastDoc; + const selectedIndexIds = getters.selectedIndices.map((idx: Index) => idx.id) + const selectedMimeTypes = getters.selectedMimeTypes; + const selectedTags = getters.selectedTags; + + const q = { + "pageSize": size + } + + Object.assign(q, SORT_MODES[getters.sortMode]); + + if (!after) { + q["fetchAggregations"] = true; + } + if (searchText) { + q["query"] = searchText; + } + if (pathText) { + q["path"] = pathText.endsWith("/") ? pathText.slice(0, -1) : pathText; + } + if (sizeMin) { + q["sizeMin"] = sizeMin; + } + if (sizeMax) { + q["sizeMax"] = sizeMax; + } + if (dateMin) { + q["dateMin"] = dateMin; + } + if (dateMax) { + q["dateMax"] = dateMax; + } + if (after) { + q["after"] = after.sort; + } + if (selectedIndexIds.length > 0) { + q["indexIds"] = selectedIndexIds; + } + if (selectedMimeTypes.length > 0) { + q["mimeTypes"] = selectedMimeTypes; + } + if (selectedTags.length > 0) { + q["tags"] = selectedTags + } + if (getters.sortMode == "random") { + q["seed"] = getters.seed; + } + if (getters.optHighlight) { + q["highlight"] = true; + q["highlightContextSize"] = Number(getters.optFragmentSize); + } + + return q; + } +} + + +export default new Sist2ElasticsearchQuery(); \ No newline at end of file diff --git a/sist2-vue/src/components/DebugInfo.vue b/sist2-vue/src/components/DebugInfo.vue index 8418f2b..6952057 100644 --- a/sist2-vue/src/components/DebugInfo.vue +++ b/sist2-vue/src/components/DebugInfo.vue @@ -1,41 +1,56 @@ \ No newline at end of file diff --git a/sist2-vue/src/components/DocCard.vue b/sist2-vue/src/components/DocCard.vue index 5178b79..2e4a39f 100644 --- a/sist2-vue/src/components/DocCard.vue +++ b/sist2-vue/src/components/DocCard.vue @@ -1,44 +1,44 @@ \ No newline at end of file diff --git a/sist2-vue/src/components/DocFileTitle.vue b/sist2-vue/src/components/DocFileTitle.vue index aa54bfa..dd8835c 100644 --- a/sist2-vue/src/components/DocFileTitle.vue +++ b/sist2-vue/src/components/DocFileTitle.vue @@ -1,5 +1,5 @@