Mirror of https://github.com/gotson/komga.git (synced 2025-01-09 04:08:00 +08:00)
feat: better full text search

works with CJK, searches by more fields. Closes #592, closes #597
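At a glance, this commit swaps the SQLite FTS5 virtual tables for an embedded Lucene index. Search now happens in two steps: Lucene resolves the search term to entity IDs, then the jOOQ query filters and sorts by those IDs. A hedged sketch of the resulting flow (names such as `luceneHelper` and the `b` table alias mirror the DAO code further down; this fragment is illustrative, not commit code):

```kotlin
// Illustrative fragment, assuming a LuceneHelper bean and the BOOK table alias `b`.
// Step 1: Lucene resolves the term to (at most `size`) entity IDs, best match first.
// Returns null when there is no search term, emptyList() when nothing matched.
val bookIds: List<String>? = luceneHelper.searchEntitiesIds("進撃の巨人", LuceneEntity.Book, size = 20)

// Step 2: jOOQ turns the IDs into a condition and a relevance ordering.
val searchCondition = b.ID.inOrNoCondition(bookIds)              // null -> no condition, [] -> false
val relevance = b.ID.sortByValues(bookIds.orEmpty(), asc = true) // CASE expression preserving Lucene order
```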
parent c73e2b11c8
commit 5aa9a95ca5
.gitignore (vendored): 1 change
@@ -48,3 +48,4 @@ nbdist/
 ### Komga
 /komga/src/main/resources/public/
 /komga/artemis/
+/komga/lucene/
@@ -8,6 +8,7 @@ COPY ${DEPENDENCY}/snapshot-dependencies/ ./
 COPY ${DEPENDENCY}/application/ ./
 ENV KOMGA_DATABASE_BACKUP_PATH="/config/database-backup.zip"
 ENV KOMGA_DATABASE_FILE="/config/database.sqlite"
+ENV KOMGA_LUCENE_DATA_DIRECTORY="/config/lucene"
 ENV SPRING_ARTEMIS_EMBEDDED_DATA_DIRECTORY="/config/artemis"
 ENV LOGGING_FILE_NAME="/config/logs/komga.log"
 ENTRYPOINT ["java", "org.springframework.boot.loader.JarLauncher"]
@@ -1,6 +1,5 @@
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
-import org.jooq.meta.jaxb.ForcedType

 plugins {
 run {
@@ -58,6 +57,13 @@ dependencies {
 implementation("org.apache.commons:commons-lang3:3.12.0")
 implementation("commons-validator:commons-validator:1.7")

+run {
+  val luceneVersion = "8.9.0"
+  implementation("org.apache.lucene:lucene-core:$luceneVersion")
+  implementation("org.apache.lucene:lucene-analyzers-common:$luceneVersion")
+  implementation("org.apache.lucene:lucene-queryparser:$luceneVersion")
+}
+
 implementation("com.ibm.icu:icu4j:69.1")

 implementation("org.apache.tika:tika-core:1.26")
@@ -243,13 +249,6 @@ jooq {
 generator.apply {
   database.apply {
     name = "org.jooq.meta.sqlite.SQLiteDatabase"
-    forcedTypes.addAll(
-      listOf(
-        ForcedType()
-          .withName("varchar")
-          .withIncludeExpression("fts_.*.(title|.*id|isbn|publisher|name)")
-      )
-    )
   }
   target.apply {
     packageName = "org.gotson.komga.jooq"
@@ -0,0 +1,24 @@
+drop table fts_book_metadata;
+DROP TRIGGER book_metadata__after_insert;
+DROP TRIGGER book_metadata__after_delete;
+DROP TRIGGER book_metadata__after_update;
+
+drop table fts_series_metadata;
+DROP TRIGGER series_metadata__after_insert;
+DROP TRIGGER series_metadata__after_delete;
+DROP TRIGGER series_metadata__after_update;
+
+drop table fts_collection;
+DROP TRIGGER collection__after_insert;
+DROP TRIGGER collection__after_delete;
+DROP TRIGGER collection__after_update;
+
+drop table fts_readlist;
+DROP TRIGGER readlist__after_insert;
+DROP TRIGGER readlist__after_delete;
+DROP TRIGGER readlist__after_update;
+
+drop table fts_book_metadata_aggregation_author;
+DROP TRIGGER book_metadata_aggregation_author__after_insert;
+DROP TRIGGER book_metadata_aggregation_author__after_delete;
+DROP TRIGGER book_metadata_aggregation_author__after_update;
@@ -77,4 +77,9 @@ sealed class Task(priority: Int = DEFAULT_PRIORITY) : Serializable {
     override fun uniqueId(): String = "REPAIR_EXTENSION_$bookId"
     override fun toString(): String = "RepairExtension(bookId='$bookId', priority='$priority')"
   }
+
+  class RebuildIndex(priority: Int = DEFAULT_PRIORITY) : Task(priority) {
+    override fun uniqueId() = "REBUILD_INDEX"
+    override fun toString(): String = "RebuildIndex(priority='$priority')"
+  }
 }
@@ -13,6 +13,7 @@ import org.gotson.komga.domain.service.LocalArtworkLifecycle
 import org.gotson.komga.domain.service.SeriesMetadataLifecycle
 import org.gotson.komga.infrastructure.jms.QUEUE_TASKS
 import org.gotson.komga.infrastructure.jms.QUEUE_TASKS_SELECTOR
+import org.gotson.komga.infrastructure.search.SearchIndexLifecycle
 import org.springframework.jms.annotation.JmsListener
 import org.springframework.stereotype.Service
 import java.nio.file.Paths
@@ -33,6 +34,7 @@ class TaskHandler(
   private val localArtworkLifecycle: LocalArtworkLifecycle,
   private val bookImporter: BookImporter,
   private val bookConverter: BookConverter,
+  private val searchIndexLifecycle: SearchIndexLifecycle,
 ) {

   @JmsListener(destination = QUEUE_TASKS, selector = QUEUE_TASKS_SELECTOR)
@@ -116,6 +118,8 @@ class TaskHandler(
           bookRepository.findByIdOrNull(task.bookId)?.let { book ->
             bookLifecycle.hashAndPersist(book)
           } ?: logger.warn { "Cannot execute task $task: Book does not exist" }
+
+        is Task.RebuildIndex -> searchIndexLifecycle.rebuildIndex()
       }
     }.also {
       logger.info { "Task $task executed in $it" }
@@ -117,6 +117,10 @@ class TaskReceiver(
     submitTask(Task.ImportBook(sourceFile, seriesId, copyMode, destinationName, upgradeBookId, priority))
   }

+  fun rebuildIndex(priority: Int = DEFAULT_PRIORITY) {
+    submitTask(Task.RebuildIndex(priority))
+  }
+
   private fun submitTask(task: Task) {
     logger.info { "Sending task: $task" }
     jmsTemplates[task.priority]!!.convertAndSend(QUEUE_TASKS, task) {
@@ -30,6 +30,8 @@ class KomgaProperties {

   var cors = Cors()

+  var lucene = Lucene()
+
   class RememberMe {
     @get:NotBlank
     var key: String? = null
@@ -49,4 +51,9 @@ class KomgaProperties {
     @Deprecated("Unused since 0.81.0")
     var batchSize: Int = 500
   }
+
+  class Lucene {
+    @get:NotBlank
+    var dataDirectory: String = ""
+  }
 }
@@ -1,8 +1,9 @@
 package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
 import org.gotson.komga.domain.model.BookSearchWithReadProgress
 import org.gotson.komga.domain.model.ReadStatus
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
 import org.gotson.komga.infrastructure.web.toFilePath
 import org.gotson.komga.interfaces.rest.dto.AuthorDto
 import org.gotson.komga.interfaces.rest.dto.BookDto
@@ -20,9 +21,9 @@ import org.jooq.DSLContext
 import org.jooq.Record
 import org.jooq.ResultQuery
 import org.jooq.impl.DSL
-import org.jooq.impl.DSL.field
 import org.jooq.impl.DSL.inline
 import org.jooq.impl.DSL.lower
+import org.jooq.impl.DSL.noCondition
 import org.springframework.data.domain.Page
 import org.springframework.data.domain.PageImpl
 import org.springframework.data.domain.PageRequest
@@ -31,11 +32,10 @@ import org.springframework.data.domain.Sort
 import org.springframework.stereotype.Component
 import java.net.URL

-private val logger = KotlinLogging.logger {}
-
 @Component
 class BookDtoDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
 ) : BookDtoRepository {

   private val b = Tables.BOOK
@@ -46,7 +46,6 @@ class BookDtoDao(
   private val s = Tables.SERIES
   private val rlb = Tables.READLIST_BOOK
   private val bt = Tables.BOOK_METADATA_TAG
-  private val fts = Tables.FTS_BOOK_METADATA

   private val sorts = mapOf(
     "name" to lower(b.NAME.udfStripAccents()),
@@ -65,13 +64,12 @@ class BookDtoDao(
     "metadata.releaseDate" to d.RELEASE_DATE,
     "readProgress.lastModified" to r.LAST_MODIFIED_DATE,
     "readList.number" to rlb.NUMBER,
-    "relevance" to field("rank"),
   )

   override fun findAll(search: BookSearchWithReadProgress, userId: String, pageable: Pageable): Page<BookDto> {
     val conditions = search.toCondition()

-    return findAll(conditions, userId, pageable, search.toJoinConditions(), null)
+    return findAll(conditions, userId, pageable, search.toJoinConditions(), null, search.searchTerm)
   }

   override fun findAllByReadListId(
@@ -83,7 +81,7 @@ class BookDtoDao(
   ): Page<BookDto> {
     val conditions = rlb.READLIST_ID.eq(readListId).and(search.toCondition())

-    return findAll(conditions, userId, pageable, search.toJoinConditions().copy(selectReadListNumber = true), filterOnLibraryIds)
+    return findAll(conditions, userId, pageable, search.toJoinConditions().copy(selectReadListNumber = true), filterOnLibraryIds, search.searchTerm)
   }

   private fun findAll(
@@ -92,46 +90,47 @@ class BookDtoDao(
     pageable: Pageable,
     joinConditions: JoinConditions = JoinConditions(),
     filterOnLibraryIds: Collection<String>?,
+    searchTerm: String?,
   ): Page<BookDto> {
-    return try {
-      val count = dsl.selectDistinct(b.ID)
-        .from(b)
-        .apply { if (joinConditions.fullTextSearch) join(fts).on(b.ID.eq(fts.BOOK_ID)) }
-        .leftJoin(m).on(b.ID.eq(m.BOOK_ID))
-        .leftJoin(d).on(b.ID.eq(d.BOOK_ID))
-        .leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
-        .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
-        .apply { if (joinConditions.tag) leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)) }
-        .apply { if (joinConditions.selectReadListNumber) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID)) }
-        .apply { if (joinConditions.author) leftJoin(a).on(b.ID.eq(a.BOOK_ID)) }
-        .where(conditions)
-        .groupBy(b.ID)
-        .fetch()
-        .size
+    val bookIds = luceneHelper.searchEntitiesIds(searchTerm, LuceneEntity.Book, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = b.ID.inOrNoCondition(bookIds)

-      val orderBy = pageable.sort.toOrderBy(sorts)
+    val count = dsl.selectDistinct(b.ID)
+      .from(b)
+      .leftJoin(m).on(b.ID.eq(m.BOOK_ID))
+      .leftJoin(d).on(b.ID.eq(d.BOOK_ID))
+      .leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
+      .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
+      .apply { if (joinConditions.tag) leftJoin(bt).on(b.ID.eq(bt.BOOK_ID)) }
+      .apply { if (joinConditions.selectReadListNumber) leftJoin(rlb).on(b.ID.eq(rlb.BOOK_ID)) }
+      .apply { if (joinConditions.author) leftJoin(a).on(b.ID.eq(a.BOOK_ID)) }
+      .where(conditions)
+      .and(searchCondition)
+      .groupBy(b.ID)
+      .fetch()
+      .size

-      val dtos = selectBase(userId, joinConditions)
-        .where(conditions)
-        .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap()
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        dtos,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !bookIds.isNullOrEmpty()) b.ID.sortByValues(bookIds, it.isAscending)
+        else it.toSortField(sorts)
       }
-    }
+
+    val dtos = selectBase(userId, joinConditions)
+      .where(conditions)
+      .and(searchCondition)
+      .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap()
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      dtos,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
   }

   override fun findByIdOrNull(bookId: String, userId: String): BookDto? =
@@ -248,7 +247,6 @@ class BookDtoDao(
       *r.fields()
     ).apply { if (joinConditions.selectReadListNumber) select(rlb.NUMBER) }
       .from(b)
-      .apply { if (joinConditions.fullTextSearch) join(fts).on(b.ID.eq(fts.BOOK_ID)) }
       .leftJoin(m).on(b.ID.eq(m.BOOK_ID))
      .leftJoin(d).on(b.ID.eq(d.BOOK_ID))
      .leftJoin(r).on(b.ID.eq(r.BOOK_ID)).and(readProgressCondition(userId))
@@ -279,9 +277,8 @@ class BookDtoDao(
   }

   private fun BookSearchWithReadProgress.toCondition(): Condition {
-    var c: Condition = DSL.trueCondition()
+    var c: Condition = noCondition()

-    if (!searchTerm.isNullOrBlank()) c = c.and(fts.match(searchTerm))
     if (!libraryIds.isNullOrEmpty()) c = c.and(b.LIBRARY_ID.`in`(libraryIds))
     if (!seriesIds.isNullOrEmpty()) c = c.and(b.SERIES_ID.`in`(seriesIds))
     if (!mediaStatus.isNullOrEmpty()) c = c.and(m.STATUS.`in`(mediaStatus))
@@ -303,7 +300,7 @@ class BookDtoDao(
     }

     if (!authors.isNullOrEmpty()) {
-      var ca: Condition = DSL.falseCondition()
+      var ca = noCondition()
       authors.forEach {
         ca = ca.or(a.NAME.equalIgnoreCase(it.name).and(a.ROLE.equalIgnoreCase(it.role)))
       }
@@ -317,14 +314,12 @@ class BookDtoDao(
     JoinConditions(
       tag = !tags.isNullOrEmpty(),
       author = !authors.isNullOrEmpty(),
-      fullTextSearch = !searchTerm.isNullOrBlank(),
     )

   private data class JoinConditions(
     val selectReadListNumber: Boolean = false,
     val tag: Boolean = false,
     val author: Boolean = false,
-    val fullTextSearch: Boolean = false,
   )

   private fun BookRecord.toDto(media: MediaDto, metadata: BookMetadataDto, readProgress: ReadProgressDto?) =
@@ -1,8 +1,9 @@
 package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
 import org.gotson.komga.domain.model.ReadList
 import org.gotson.komga.domain.persistence.ReadListRepository
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
 import org.gotson.komga.jooq.Tables
 import org.gotson.komga.jooq.tables.records.ReadlistRecord
 import org.jooq.DSLContext
@@ -20,21 +21,18 @@ import java.time.LocalDateTime
 import java.time.ZoneId
 import java.util.SortedMap

-private val logger = KotlinLogging.logger {}
-
 @Component
 class ReadListDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
 ) : ReadListRepository {

   private val rl = Tables.READLIST
   private val rlb = Tables.READLIST_BOOK
   private val b = Tables.BOOK
-  private val fts = Tables.FTS_READLIST

   private val sorts = mapOf(
     "name" to DSL.lower(rl.NAME.udfStripAccents()),
-    "relevance" to DSL.field("rank"),
   )

   override fun findByIdOrNull(readListId: String): ReadList? =
@@ -51,79 +49,72 @@ class ReadListDao(
       .firstOrNull()

   override fun findAll(search: String?, pageable: Pageable): Page<ReadList> {
-    val conditions = if (!search.isNullOrBlank()) searchCondition(search)
-    else DSL.trueCondition()
+    val readListIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.ReadList, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = rl.ID.inOrNoCondition(readListIds)

-    return try {
-      val count = dsl.selectCount()
-        .from(rl)
-        .apply { if (!search.isNullOrBlank()) join(fts).on(rl.ID.eq(fts.ID)) }
-        .where(conditions)
-        .fetchOne(0, Long::class.java) ?: 0
+    val count = dsl.selectCount()
+      .from(rl)
+      .where(searchCondition)
+      .fetchOne(0, Long::class.java) ?: 0

-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(null)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
-        count
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !readListIds.isNullOrEmpty()) rl.ID.sortByValues(readListIds, it.isAscending)
+        else it.toSortField(sorts)
       }
-    }
+
+    val items = selectBase()
+      .where(searchCondition)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(null)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
+      count
+    )
   }

   override fun findAllByLibraryIds(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String?, pageable: Pageable): Page<ReadList> {
+    val readListIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.ReadList, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = rl.ID.inOrNoCondition(readListIds)
+
     val conditions = b.LIBRARY_ID.`in`(belongsToLibraryIds)
-      .apply { if (!search.isNullOrBlank()) and(searchCondition(search)) }
+      .and(searchCondition)
       .apply { filterOnLibraryIds?.let { and(b.LIBRARY_ID.`in`(it)) } }

-    return try {
-      val ids = dsl.selectDistinct(rl.ID)
-        .from(rl)
-        .apply { if (!search.isNullOrBlank()) join(fts).on(rl.ID.eq(fts.ID)) }
-        .leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
-        .leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
-        .where(conditions)
-        .fetch(0, String::class.java)
+    val ids = dsl.selectDistinct(rl.ID)
+      .from(rl)
+      .leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
+      .leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
+      .where(conditions)
+      .fetch(0, String::class.java)

-      val count = ids.size
+    val count = ids.size

-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(rl.ID.`in`(ids))
-        .and(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(filterOnLibraryIds)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !readListIds.isNullOrEmpty()) rl.ID.sortByValues(readListIds, it.isAscending)
+        else it.toSortField(sorts)
       }
-    }
+
+    val items = selectBase()
+      .where(rl.ID.`in`(ids))
+      .and(conditions)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(filterOnLibraryIds)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
   }

   override fun findAllContainingBookId(containsBookId: String, filterOnLibraryIds: Collection<String>?): Collection<ReadList> {
@@ -157,13 +148,9 @@ class ReadListDao(
       .fetchAndMap(null)
       .firstOrNull()

-  private fun searchCondition(search: String) =
-    fts.match(search)
-
-  private fun selectBase(joinFts: Boolean = false) =
+  private fun selectBase() =
     dsl.selectDistinct(*rl.fields())
       .from(rl)
-      .apply { if (joinFts) join(fts).on(rl.ID.eq(fts.ID)) }
       .leftJoin(rlb).on(rl.ID.eq(rlb.READLIST_ID))
       .leftJoin(b).on(rlb.BOOK_ID.eq(b.ID))
@@ -1,6 +1,5 @@
 package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
 import org.gotson.komga.domain.model.Author
 import org.gotson.komga.domain.persistence.ReferentialRepository
 import org.gotson.komga.infrastructure.language.stripAccents
@@ -8,10 +7,9 @@ import org.gotson.komga.jooq.Tables
 import org.gotson.komga.jooq.tables.records.BookMetadataAggregationAuthorRecord
 import org.gotson.komga.jooq.tables.records.BookMetadataAuthorRecord
 import org.jooq.DSLContext
-import org.jooq.impl.DSL.field
 import org.jooq.impl.DSL.lower
+import org.jooq.impl.DSL.noCondition
 import org.jooq.impl.DSL.select
-import org.jooq.impl.DSL.trueCondition
 import org.springframework.data.domain.Page
 import org.springframework.data.domain.PageImpl
 import org.springframework.data.domain.PageRequest
@@ -20,8 +18,6 @@ import org.springframework.data.domain.Sort
 import org.springframework.stereotype.Component
 import java.time.LocalDate

-private val logger = KotlinLogging.logger {}
-
 @Component
 class ReferentialDao(
   private val dsl: DSLContext
@@ -39,7 +35,6 @@ class ReferentialDao(
   private val st = Tables.SERIES_METADATA_TAG
   private val cs = Tables.COLLECTION_SERIES
   private val rb = Tables.READLIST_BOOK
-  private val ftsAuthors = Tables.FTS_BOOK_METADATA_AGGREGATION_AUTHOR

   override fun findAllAuthorsByName(search: String, filterOnLibraryIds: Collection<String>?): List<Author> =
     dsl.selectDistinct(a.NAME, a.ROLE)
@@ -118,56 +113,46 @@ class ReferentialDao(
     )

   private fun findAuthorsByName(search: String?, role: String?, filterOnLibraryIds: Collection<String>?, pageable: Pageable, filterBy: FilterBy?): Page<Author> {
-    return try {
-      val query = dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
-        .from(bmaa)
-        .apply { if (!search.isNullOrBlank()) join(ftsAuthors).on(ftsAuthors.rowid().eq(bmaa.rowid())) }
-        .apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
-        .apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
-        .apply {
-          if (filterBy?.type == FilterByType.READLIST)
-            leftJoin(b).on(bmaa.SERIES_ID.eq(b.SERIES_ID))
-              .leftJoin(rb).on(b.ID.eq(rb.BOOK_ID))
-        }
-        .where(trueCondition())
-        .apply { if (!search.isNullOrBlank()) and(ftsAuthors.match(search)) }
-        .apply { role?.let { and(bmaa.ROLE.eq(role)) } }
-        .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
-        .apply {
-          filterBy?.let {
-            when (it.type) {
-              FilterByType.LIBRARY -> and(s.LIBRARY_ID.eq(it.id))
-              FilterByType.COLLECTION -> and(cs.COLLECTION_ID.eq(it.id))
-              FilterByType.SERIES -> and(bmaa.SERIES_ID.eq(it.id))
-              FilterByType.READLIST -> and(rb.READLIST_ID.eq(it.id))
-            }
+    val query = dsl.selectDistinct(bmaa.NAME, bmaa.ROLE)
+      .from(bmaa)
+      .apply { if (filterOnLibraryIds != null || filterBy?.type == FilterByType.LIBRARY) leftJoin(s).on(bmaa.SERIES_ID.eq(s.ID)) }
+      .apply { if (filterBy?.type == FilterByType.COLLECTION) leftJoin(cs).on(bmaa.SERIES_ID.eq(cs.SERIES_ID)) }
+      .apply {
+        if (filterBy?.type == FilterByType.READLIST)
+          leftJoin(b).on(bmaa.SERIES_ID.eq(b.SERIES_ID))
+            .leftJoin(rb).on(b.ID.eq(rb.BOOK_ID))
+      }
+      .where(noCondition())
+      .apply { search?.let { and(bmaa.NAME.udfStripAccents().containsIgnoreCase(search.stripAccents())) } }
+      .apply { role?.let { and(bmaa.ROLE.eq(role)) } }
+      .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }
+      .apply {
+        filterBy?.let {
+          when (it.type) {
+            FilterByType.LIBRARY -> and(s.LIBRARY_ID.eq(it.id))
+            FilterByType.COLLECTION -> and(cs.COLLECTION_ID.eq(it.id))
+            FilterByType.SERIES -> and(bmaa.SERIES_ID.eq(it.id))
+            FilterByType.READLIST -> and(rb.READLIST_ID.eq(it.id))
           }
         }
+      }

-      val count = dsl.fetchCount(query)
-      val sort = if (!search.isNullOrBlank()) field("rank")
-      else lower(bmaa.NAME.udfStripAccents())
-
-      val items = query
-        .orderBy(sort)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchInto(a)
-        .map { it.toDomain() }
-
-      val pageSort = Sort.by("relevance")
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
-      }
-    }
+    val count = dsl.fetchCount(query)
+    val sort = lower(bmaa.NAME.udfStripAccents())
+
+    val items = query
+      .orderBy(sort)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchInto(a)
+      .map { it.toDomain() }
+
+    val pageSort = Sort.by("relevance")
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
   }

   override fun findAllAuthorsNamesByName(search: String, filterOnLibraryIds: Collection<String>?): List<String> =
@@ -1,8 +1,9 @@
 package org.gotson.komga.infrastructure.jooq

-import mu.KotlinLogging
 import org.gotson.komga.domain.model.SeriesCollection
 import org.gotson.komga.domain.persistence.SeriesCollectionRepository
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
 import org.gotson.komga.jooq.Tables
 import org.gotson.komga.jooq.tables.records.CollectionRecord
 import org.jooq.DSLContext
@@ -19,21 +20,18 @@ import org.springframework.transaction.annotation.Transactional
 import java.time.LocalDateTime
 import java.time.ZoneId

-private val logger = KotlinLogging.logger {}
-
 @Component
 class SeriesCollectionDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
 ) : SeriesCollectionRepository {

   private val c = Tables.COLLECTION
   private val cs = Tables.COLLECTION_SERIES
   private val s = Tables.SERIES
-  private val fts = Tables.FTS_COLLECTION

   private val sorts = mapOf(
     "name" to DSL.lower(c.NAME.udfStripAccents()),
-    "relevance" to DSL.field("rank"),
   )

   override fun findByIdOrNull(collectionId: String): SeriesCollection? =
@@ -50,78 +48,72 @@ class SeriesCollectionDao(
       .firstOrNull()

   override fun findAll(search: String?, pageable: Pageable): Page<SeriesCollection> {
-    val conditions = if (!search.isNullOrBlank()) searchCondition(search)
-    else DSL.trueCondition()
+    val collectionIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.Collection, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = c.ID.inOrNoCondition(collectionIds)

-    return try {
-      val count = dsl.selectCount()
-        .from(c)
-        .apply { if (!search.isNullOrBlank()) join(fts).on(c.ID.eq(fts.ID)) }
-        .where(conditions)
-        .fetchOne(0, Long::class.java) ?: 0
+    val count = dsl.selectCount()
+      .from(c)
+      .where(searchCondition)
+      .fetchOne(0, Long::class.java) ?: 0

-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(null)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
-        count
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !collectionIds.isNullOrEmpty()) c.ID.sortByValues(collectionIds, it.isAscending)
+        else it.toSortField(sorts)
       }
-    }
+
+    val items = selectBase()
+      .where(searchCondition)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(null)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count.toInt(), 20), pageSort),
+      count
+    )
   }

   override fun findAllByLibraryIds(belongsToLibraryIds: Collection<String>, filterOnLibraryIds: Collection<String>?, search: String?, pageable: Pageable): Page<SeriesCollection> {
+    val collectionIds = luceneHelper.searchEntitiesIds(search, LuceneEntity.Collection, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = c.ID.inOrNoCondition(collectionIds)
+
     val conditions = s.LIBRARY_ID.`in`(belongsToLibraryIds)
-      .apply { if (!search.isNullOrBlank()) and(searchCondition(search)) }
+      .and(searchCondition)
       .apply { filterOnLibraryIds?.let { and(s.LIBRARY_ID.`in`(it)) } }

-    return try {
-      val ids = dsl.selectDistinct(c.ID)
-        .from(c)
-        .leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
-        .leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
-        .where(conditions)
-        .fetch(0, String::class.java)
+    val ids = dsl.selectDistinct(c.ID)
+      .from(c)
+      .leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
+      .leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
+      .where(conditions)
+      .fetch(0, String::class.java)

-      val count = ids.size
+    val count = ids.size

-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val items = selectBase(!search.isNullOrBlank())
-        .where(c.ID.`in`(ids))
-        .and(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap(filterOnLibraryIds)
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        items,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !collectionIds.isNullOrEmpty()) c.ID.sortByValues(collectionIds, it.isAscending)
+        else it.toSortField(sorts)
       }
-    }
+
+    val items = selectBase()
+      .where(c.ID.`in`(ids))
+      .and(conditions)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap(filterOnLibraryIds)
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      items,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
   }

   override fun findAllContainingSeriesId(containsSeriesId: String, filterOnLibraryIds: Collection<String>?): Collection<SeriesCollection> {
@@ -155,13 +147,9 @@ class SeriesCollectionDao(
       .fetchAndMap(null)
       .firstOrNull()

-  private fun searchCondition(search: String) =
-    fts.match(search)
-
-  private fun selectBase(joinFts: Boolean = false) =
+  private fun selectBase() =
     dsl.selectDistinct(*c.fields())
       .from(c)
-      .apply { if (joinFts) join(fts).on(c.ID.eq(fts.ID)) }
       .leftJoin(cs).on(c.ID.eq(cs.COLLECTION_ID))
       .leftJoin(s).on(cs.SERIES_ID.eq(s.ID))
@@ -232,6 +220,7 @@ class SeriesCollectionDao(

   @Transactional
   override fun delete(collectionId: String) {

     dsl.deleteFrom(cs).where(cs.COLLECTION_ID.eq(collectionId)).execute()
     dsl.deleteFrom(c).where(c.ID.eq(collectionId)).execute()
   }
@@ -4,6 +4,8 @@ import mu.KotlinLogging
 import org.gotson.komga.domain.model.ReadStatus
 import org.gotson.komga.domain.model.SeriesSearch
 import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
+import org.gotson.komga.infrastructure.search.LuceneEntity
+import org.gotson.komga.infrastructure.search.LuceneHelper
 import org.gotson.komga.infrastructure.web.toFilePath
 import org.gotson.komga.interfaces.rest.dto.AuthorDto
 import org.gotson.komga.interfaces.rest.dto.BookMetadataAggregationDto
@@ -43,7 +45,8 @@ const val BOOKS_READ_COUNT = "booksReadCount"

 @Component
 class SeriesDtoDao(
-  private val dsl: DSLContext
+  private val dsl: DSLContext,
+  private val luceneHelper: LuceneHelper,
 ) : SeriesDtoRepository {

   companion object {
@@ -57,7 +60,6 @@ class SeriesDtoDao(
   private val bma = Tables.BOOK_METADATA_AGGREGATION
   private val bmaa = Tables.BOOK_METADATA_AGGREGATION_AUTHOR
   private val bmat = Tables.BOOK_METADATA_AGGREGATION_TAG
-  private val fts = Tables.FTS_SERIES_METADATA

   val countUnread: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isNull, 1).otherwise(0))
   val countRead: AggregateFunction<BigDecimal> = DSL.sum(DSL.`when`(r.COMPLETED.isTrue, 1).otherwise(0))
@@ -81,13 +83,12 @@ class SeriesDtoDao(
     "collection.number" to cs.NUMBER,
     "name" to lower(s.NAME.udfStripAccents()),
     "booksCount" to s.BOOK_COUNT,
-    "relevance" to DSL.field("rank"),
   )

   override fun findAll(search: SeriesSearchWithReadProgress, userId: String, pageable: Pageable): Page<SeriesDto> {
     val conditions = search.toCondition()

-    return findAll(conditions, userId, pageable, search.toJoinConditions())
+    return findAll(conditions, userId, pageable, search.toJoinConditions(), search.searchTerm)
   }

   override fun findAllByCollectionId(
@@ -99,7 +100,7 @@ class SeriesDtoDao(
     val conditions = search.toCondition().and(cs.COLLECTION_ID.eq(collectionId))
     val joinConditions = search.toJoinConditions().copy(selectCollectionNumber = true, collection = true)

-    return findAll(conditions, userId, pageable, joinConditions)
+    return findAll(conditions, userId, pageable, joinConditions, search.searchTerm)
   }

   override fun findAllRecentlyUpdated(
@@ -110,41 +111,35 @@ class SeriesDtoDao(
     val conditions = search.toCondition()
       .and(s.CREATED_DATE.ne(s.LAST_MODIFIED_DATE))

-    return findAll(conditions, userId, pageable, search.toJoinConditions())
+    return findAll(conditions, userId, pageable, search.toJoinConditions(), search.searchTerm)
   }

   override fun countByFirstCharacter(search: SeriesSearchWithReadProgress, userId: String): List<GroupCountDto> {
     val conditions = search.toCondition()
     val joinConditions = search.toJoinConditions()
+    val seriesIds = luceneHelper.searchEntitiesIds(search.searchTerm, LuceneEntity.Series, 20)
+    val searchCondition = s.ID.inOrNoCondition(seriesIds)

     val firstChar = lower(substring(d.TITLE_SORT, 1, 1))
-    return try {
-      dsl.select(firstChar, count())
-        .from(s)
-        .apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
-        .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
-        .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
-        .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
-        .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
-        .apply {
-          if (joinConditions.tag)
-            leftJoin(st).on(s.ID.eq(st.SERIES_ID))
-              .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
-        }
-        .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
-        .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
-        .where(conditions)
-        .groupBy(firstChar)
-        .map {
-          GroupCountDto(it.value1(), it.value2())
-        }
-    } catch (e: Exception) {
-      if (e.isFtsError()) emptyList()
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    return dsl.select(firstChar, count())
+      .from(s)
+      .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
+      .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
+      .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
+      .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
+      .apply {
+        if (joinConditions.tag)
+          leftJoin(st).on(s.ID.eq(st.SERIES_ID))
+            .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
       }
-    }
+      .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
+      .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
+      .where(conditions)
+      .and(searchCondition)
+      .groupBy(firstChar)
+      .map {
+        GroupCountDto(it.value1(), it.value2())
+      }
   }

   override fun findByIdOrNull(seriesId: String, userId: String): SeriesDto? =
@@ -161,7 +156,6 @@ class SeriesDtoDao(
     dsl.selectDistinct(*groupFields)
       .apply { if (joinConditions.selectCollectionNumber) select(cs.NUMBER) }
       .from(s)
-      .apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
      .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
      .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
      .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
@@ -178,48 +172,49 @@ class SeriesDtoDao(
     conditions: Condition,
     userId: String,
     pageable: Pageable,
-    joinConditions: JoinConditions = JoinConditions()
+    joinConditions: JoinConditions = JoinConditions(),
+    searchTerm: String?,
   ): Page<SeriesDto> {
-    return try {
-      val count = dsl.select(count(s.ID))
-        .from(s)
-        .apply { if (joinConditions.fullTextSearch) join(fts).on(s.ID.eq(fts.SERIES_ID)) }
-        .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
-        .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
-        .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
-        .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
-        .apply {
-          if (joinConditions.tag)
-            leftJoin(st).on(s.ID.eq(st.SERIES_ID))
-              .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
-        }
-        .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
-        .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
-        .where(conditions)
-        .fetchOne(count(s.ID)) ?: 0
+    val seriesIds = luceneHelper.searchEntitiesIds(searchTerm, LuceneEntity.Series, if (pageable.isPaged) pageable.pageSize else 20)
+    val searchCondition = s.ID.inOrNoCondition(seriesIds)

-      val orderBy = pageable.sort.toOrderBy(sorts)
-
-      val dtos = selectBase(userId, joinConditions)
-        .where(conditions)
-        .orderBy(orderBy)
-        .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
-        .fetchAndMap()
-
-      val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
-      PageImpl(
-        dtos,
-        if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
-        else PageRequest.of(0, maxOf(count, 20), pageSort),
-        count.toLong()
-      )
-    } catch (e: Exception) {
-      if (e.isFtsError()) PageImpl(emptyList())
-      else {
-        logger.error(e) { "Error while fetching data" }
-        throw e
+    val count = dsl.select(count(s.ID))
+      .from(s)
+      .leftJoin(d).on(s.ID.eq(d.SERIES_ID))
+      .leftJoin(bma).on(s.ID.eq(bma.SERIES_ID))
+      .leftJoin(rs).on(s.ID.eq(rs.SERIES_ID)).and(readProgressConditionSeries(userId))
+      .apply { if (joinConditions.genre) leftJoin(g).on(s.ID.eq(g.SERIES_ID)) }
+      .apply {
+        if (joinConditions.tag)
+          leftJoin(st).on(s.ID.eq(st.SERIES_ID))
+            .leftJoin(bmat).on(s.ID.eq(bmat.SERIES_ID))
       }
-    }
+      .apply { if (joinConditions.collection) leftJoin(cs).on(s.ID.eq(cs.SERIES_ID)) }
+      .apply { if (joinConditions.aggregationAuthor) leftJoin(bmaa).on(s.ID.eq(bmaa.SERIES_ID)) }
+      .where(conditions)
+      .and(searchCondition)
+      .fetchOne(count(s.ID)) ?: 0
+
+    val orderBy =
+      pageable.sort.mapNotNull {
+        if (it.property == "relevance" && !seriesIds.isNullOrEmpty()) s.ID.sortByValues(seriesIds, it.isAscending)
+        else it.toSortField(sorts)
+      }
+
+    val dtos = selectBase(userId, joinConditions)
+      .where(conditions)
+      .and(searchCondition)
+      .orderBy(orderBy)
+      .apply { if (pageable.isPaged) limit(pageable.pageSize).offset(pageable.offset) }
+      .fetchAndMap()
+
+    val pageSort = if (orderBy.size > 1) pageable.sort else Sort.unsorted()
+    return PageImpl(
+      dtos,
+      if (pageable.isPaged) PageRequest.of(pageable.pageNumber, pageable.pageSize, pageSort)
+      else PageRequest.of(0, maxOf(count, 20), pageSort),
+      count.toLong()
+    )
   }

   private fun readProgressConditionSeries(userId: String): Condition = rs.USER_ID.eq(userId).or(rs.USER_ID.isNull)
@@ -266,9 +261,8 @@ class SeriesDtoDao(
   }

   private fun SeriesSearchWithReadProgress.toCondition(): Condition {
-    var c: Condition = DSL.trueCondition()
+    var c = DSL.noCondition()

-    if (!searchTerm.isNullOrBlank()) c = c.and(fts.match(searchTerm))
     if (!libraryIds.isNullOrEmpty()) c = c.and(s.LIBRARY_ID.`in`(libraryIds))
     if (!collectionIds.isNullOrEmpty()) c = c.and(cs.COLLECTION_ID.`in`(collectionIds))
     searchRegex?.let { c = c.and((it.second.toColumn()).likeRegex(it.first)) }
@@ -280,14 +274,14 @@ class SeriesDtoDao(
     if (!genres.isNullOrEmpty()) c = c.and(lower(g.GENRE).`in`(genres.map { it.lowercase() }))
     if (!tags.isNullOrEmpty()) c = c.and(lower(st.TAG).`in`(tags.map { it.lowercase() }).or(lower(bmat.TAG).`in`(tags.map { it.lowercase() })))
     if (!ageRatings.isNullOrEmpty()) {
-      val c1 = if (ageRatings.contains(null)) d.AGE_RATING.isNull else DSL.falseCondition()
-      val c2 = if (ageRatings.filterNotNull().isNotEmpty()) d.AGE_RATING.`in`(ageRatings.filterNotNull()) else DSL.falseCondition()
+      val c1 = if (ageRatings.contains(null)) d.AGE_RATING.isNull else DSL.noCondition()
+      val c2 = if (ageRatings.filterNotNull().isNotEmpty()) d.AGE_RATING.`in`(ageRatings.filterNotNull()) else DSL.noCondition()
       c = c.and(c1.or(c2))
     }
     // cast to String is necessary for SQLite, else the years in the IN block are coerced to Int, even though YEAR for SQLite uses strftime (string)
     if (!releaseYears.isNullOrEmpty()) c = c.and(DSL.year(bma.RELEASE_DATE).cast(String::class.java).`in`(releaseYears))
     if (!authors.isNullOrEmpty()) {
-      var ca: Condition = DSL.falseCondition()
+      var ca = DSL.noCondition()
       authors.forEach {
         ca = ca.or(bmaa.NAME.equalIgnoreCase(it.name).and(bmaa.ROLE.equalIgnoreCase(it.role)))
       }
@@ -320,7 +314,6 @@ class SeriesDtoDao(
       tag = !tags.isNullOrEmpty(),
       collection = !collectionIds.isNullOrEmpty(),
       aggregationAuthor = !authors.isNullOrEmpty(),
-      fullTextSearch = !searchTerm.isNullOrBlank(),
     )

   private data class JoinConditions(
@@ -329,7 +322,6 @@ class SeriesDtoDao(
     val tag: Boolean = false,
     val collection: Boolean = false,
     val aggregationAuthor: Boolean = false,
-    val fullTextSearch: Boolean = false,
   )

   private fun SeriesRecord.toDto(
@@ -4,10 +4,8 @@ import org.gotson.komga.infrastructure.datasource.SqliteUdfDataSource
 import org.jooq.Condition
 import org.jooq.Field
 import org.jooq.SortField
-import org.jooq.Table
 import org.jooq.impl.DSL
 import org.springframework.data.domain.Sort
-import org.sqlite.SQLiteException
 import java.time.LocalDateTime
 import java.time.ZoneId
 import java.time.ZoneOffset
@@ -17,8 +15,26 @@ fun LocalDateTime.toUTC(): LocalDateTime =

 fun Sort.toOrderBy(sorts: Map<String, Field<out Any>>): List<SortField<out Any>> =
   this.mapNotNull {
-    val f = sorts[it.property]
-    if (it.isAscending) f?.asc() else f?.desc()
+    it.toSortField(sorts)
   }

+fun Sort.Order.toSortField(sorts: Map<String, Field<out Any>>): SortField<out Any>? {
+  val f = sorts[property] ?: return null
+  return if (isAscending) f.asc() else f.desc()
+}
+
+fun Field<String>.sortByValues(values: List<String>, asc: Boolean = true): Field<Int> {
+  var c = DSL.choose(this).`when`("dummy dsl", Int.MAX_VALUE)
+  val multiplier = if (asc) 1 else -1
+  values.forEachIndexed { index, value -> c = c.`when`(value, index * multiplier) }
+  return c.otherwise(Int.MAX_VALUE)
+}
+
+fun Field<String>.inOrNoCondition(list: List<String>?): Condition =
+  when {
+    list == null -> DSL.noCondition()
+    list.isEmpty() -> DSL.falseCondition()
+    else -> this.`in`(list)
+  }
+
 fun LocalDateTime.toCurrentTimeZone(): LocalDateTime =
@@ -26,21 +42,3 @@ fun LocalDateTime.toCurrentTimeZone(): LocalDateTime =

 fun Field<String>.udfStripAccents() =
   DSL.function(SqliteUdfDataSource.udfStripAccents, String::class.java, this)
-
-fun Table<*>.match(term: String): Condition =
-  DSL.condition("{0} MATCH {1}", DSL.field(this.name), term.ftsSanitized())
-
-fun String.ftsSanitized() = this
-  .replace("-", " ") // to better match queries like "x-men"
-  .replace("[^\\p{L}\\p{Z}\\p{N}\":+*^{}()]".toRegex(), "") // to avoid fts5 syntax error
-  .removePrefix("*") // to avoid unknown special query
-
-private val ftsErrorMessages = listOf("no such column", "unknown special query", "fts5: syntax error near", "unterminated string")
-
-/**
- * FTS queries of the form field:term with a field name that doesn't exist will raise an exception
- * given the same search string can be requested for different object type, this could happen quite often
- */
-fun Exception.isFtsError() =
-  cause is SQLiteException &&
-    ftsErrorMessages.any { message?.contains(it) == true }
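Two of the helpers added above do the heavy lifting for relevance ordering: `sortByValues` renders as a SQL `CASE` over the ID column so rows come back in Lucene's order, and `inOrNoCondition` keeps "no search term" distinct from "searched with no hits". An illustrative sketch of what they evaluate to (assumed SQL rendering and the `b` table alias; not commit code):

```kotlin
// Illustrative fragment. Given bookIds = listOf("B3", "B1") from Lucene, best match first:
val order = b.ID.sortByValues(listOf("B3", "B1"), asc = true)
// renders roughly as:
//   CASE BOOK.ID WHEN 'dummy dsl' THEN 2147483647
//                WHEN 'B3' THEN 0
//                WHEN 'B1' THEN 1
//                ELSE 2147483647 END
// so ORDER BY puts B3 first, B1 second, and unmatched rows last;
// descending relevance negates the indexes (0, -1, ...) instead.

val cond = b.ID.inOrNoCondition(null)               // no search term  -> DSL.noCondition()
val none = b.ID.inOrNoCondition(emptyList())        // searched, 0 hits -> DSL.falseCondition()
val some = b.ID.inOrNoCondition(listOf("B3", "B1")) // -> BOOK.ID IN ('B3', 'B1')
```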
@@ -1,9 +1,12 @@
 package org.gotson.komga.infrastructure.language

 import org.apache.commons.lang3.StringUtils
+import java.time.LocalDate
 import java.time.LocalDateTime
+import java.time.ZoneId
 import java.time.temporal.ChronoUnit
 import java.time.temporal.TemporalUnit
+import java.util.Date
 import java.util.Enumeration
 import java.util.SortedMap
@@ -39,3 +42,5 @@ fun LocalDateTime.notEquals(other: LocalDateTime, precision: TemporalUnit = Chro
   this.truncatedTo(precision) != other.truncatedTo(precision)

 fun String.stripAccents(): String = StringUtils.stripAccents(this)
+
+fun LocalDate.toDate(): Date = Date.from(this.atStartOfDay(ZoneId.of("Z")).toInstant())
@@ -0,0 +1,31 @@
+package org.gotson.komga.infrastructure.search
+
+import org.apache.lucene.store.ByteBuffersDirectory
+import org.apache.lucene.store.Directory
+import org.apache.lucene.store.FSDirectory
+import org.apache.lucene.util.Version
+import org.gotson.komga.infrastructure.configuration.KomgaProperties
+import org.springframework.context.annotation.Bean
+import org.springframework.context.annotation.Configuration
+import org.springframework.context.annotation.Profile
+import java.nio.file.Paths
+
+@Configuration
+class LuceneConfiguration(
+  private val komgaProperties: KomgaProperties,
+) {
+
+  @Bean
+  fun analyzer() =
+    MultiLingualAnalyzer().apply { version = Version.LUCENE_8_9_0 }
+
+  @Bean
+  @Profile("test")
+  fun memoryDirectory(): Directory =
+    ByteBuffersDirectory()
+
+  @Bean
+  @Profile("!test")
+  fun diskDirectory(): Directory =
+    FSDirectory.open(Paths.get(komgaProperties.lucene.dataDirectory))
+}
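The two `Directory` beans select index storage by Spring profile: tests run against an in-memory `ByteBuffersDirectory`, while normal runs persist under `komga.lucene.data-directory` (surfaced as `KOMGA_LUCENE_DATA_DIRECTORY` in the Dockerfile hunk above). A minimal standalone sketch of wiring the same pieces by hand, useful for seeing the Lucene API without Spring (illustrative, not commit code):

```kotlin
import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.document.TextField
import org.apache.lucene.index.IndexWriter
import org.apache.lucene.index.IndexWriterConfig
import org.apache.lucene.store.ByteBuffersDirectory

fun main() {
  // What the "test" profile bean provides: an index that lives and dies in memory.
  ByteBuffersDirectory().use { directory ->
    IndexWriter(directory, IndexWriterConfig(MultiLingualAnalyzer())).use { writer ->
      writer.addDocument(Document().apply { add(TextField("title", "Dr. STONE", Field.Store.NO)) })
      writer.commit()
    }
  }
}
```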
@@ -0,0 +1,88 @@
+package org.gotson.komga.infrastructure.search
+
+import org.apache.lucene.document.DateTools
+import org.apache.lucene.document.Document
+import org.apache.lucene.document.Field
+import org.apache.lucene.document.StringField
+import org.apache.lucene.document.TextField
+import org.gotson.komga.domain.model.ReadList
+import org.gotson.komga.domain.model.SeriesCollection
+import org.gotson.komga.infrastructure.language.toDate
+import org.gotson.komga.interfaces.rest.dto.BookDto
+import org.gotson.komga.interfaces.rest.dto.SeriesDto
+
+enum class LuceneEntity(val type: String, val id: String, val defaultFields: Array<String>) {
+  Book("book", "book_id", arrayOf("title", "isbn")),
+  Series("series", "series_id", arrayOf("title")),
+  Collection("collection", "collection_id", arrayOf("name")),
+  ReadList("readlist", "readlist_id", arrayOf("name"));
+
+  companion object {
+    const val TYPE = "type"
+  }
+}
+
+fun BookDto.toDocument() =
+  Document().apply {
+    add(TextField("title", metadata.title, Field.Store.NO))
+    add(TextField("isbn", metadata.isbn, Field.Store.NO))
+    metadata.tags.forEach {
+      add(TextField("tag", it, Field.Store.NO))
+    }
+    metadata.authors.forEach {
+      add(TextField("author", it.name, Field.Store.NO))
+      add(TextField(it.role, it.name, Field.Store.NO))
+    }
+    if (metadata.releaseDate != null) add(TextField("release_date", DateTools.dateToString(metadata.releaseDate.toDate(), DateTools.Resolution.YEAR), Field.Store.NO))
+    add(TextField("status", media.status, Field.Store.NO))
+    add(TextField("deleted", deleted.toString(), Field.Store.NO))
+
+    add(StringField(LuceneEntity.TYPE, LuceneEntity.Book.type, Field.Store.NO))
+    add(StringField(LuceneEntity.Book.id, id, Field.Store.YES))
+  }
+
+fun SeriesDto.toDocument() =
+  Document().apply {
+    add(TextField("title", metadata.title, Field.Store.NO))
+    add(TextField("publisher", metadata.publisher, Field.Store.NO))
+    add(TextField("status", metadata.status, Field.Store.NO))
+    add(TextField("reading_direction", metadata.readingDirection, Field.Store.NO))
+    if (metadata.ageRating != null) add(TextField("age_rating", metadata.ageRating.toString(), Field.Store.NO))
+    if (metadata.language.isNotBlank()) add(TextField("language", metadata.language, Field.Store.NO))
+    metadata.tags.forEach {
+      add(TextField("series_tag", it, Field.Store.NO))
+      add(TextField("tag", it, Field.Store.NO))
+    }
+    booksMetadata.tags.forEach {
+      add(TextField("book_tag", it, Field.Store.NO))
+      add(TextField("tag", it, Field.Store.NO))
+    }
+    metadata.genres.forEach {
+      add(TextField("genre", it, Field.Store.NO))
+    }
+    if (metadata.totalBookCount != null) add(TextField("total_book_count", metadata.totalBookCount.toString(), Field.Store.NO))
+    add(TextField("book_count", booksCount.toString(), Field.Store.NO))
+    booksMetadata.authors.forEach {
+      add(TextField("author", it.name, Field.Store.NO))
+      add(TextField(it.role, it.name, Field.Store.NO))
+    }
+    if (booksMetadata.releaseDate != null) add(TextField("release_date", DateTools.dateToString(booksMetadata.releaseDate.toDate(), DateTools.Resolution.YEAR), Field.Store.NO))
+    add(TextField("deleted", deleted.toString(), Field.Store.NO))
+
+    add(StringField(LuceneEntity.TYPE, LuceneEntity.Series.type, Field.Store.NO))
+    add(StringField(LuceneEntity.Series.id, id, Field.Store.YES))
+  }
+
+fun SeriesCollection.toDocument() =
+  Document().apply {
+    add(TextField("name", name, Field.Store.NO))
+    add(StringField(LuceneEntity.TYPE, LuceneEntity.Collection.type, Field.Store.NO))
+    add(StringField(LuceneEntity.Collection.id, id, Field.Store.YES))
+  }
+
+fun ReadList.toDocument() =
+  Document().apply {
+    add(TextField("name", name, Field.Store.NO))
+    add(StringField(LuceneEntity.TYPE, LuceneEntity.ReadList.type, Field.Store.NO))
+    add(StringField(LuceneEntity.ReadList.id, id, Field.Store.YES))
+  }
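These document mappings are the "search by more fields" half of the commit message: beyond each entity's `defaultFields`, every tag, genre, author (also indexed under its role), publisher, ISBN, status, and year-resolution release date becomes its own queryable Lucene field. Illustrative queries the classic query-parser syntax would then support (assumed example values; not commit code):

```kotlin
// Illustrative fragment, assuming an index built with the documents above.
fun examples(luceneHelper: LuceneHelper): List<List<String>?> = listOf(
  // Unqualified terms hit the entity's defaultFields (title/isbn for books):
  luceneHelper.searchEntitiesIds("one piece", LuceneEntity.Book, size = 20),
  // Field-scoped terms reach the extra fields indexed above:
  luceneHelper.searchEntitiesIds("publisher:vertical", LuceneEntity.Series, size = 20),
  luceneHelper.searchEntitiesIds("writer:urasawa", LuceneEntity.Book, size = 20), // author role field
  luceneHelper.searchEntitiesIds("release_date:2019 status:ENDED", LuceneEntity.Series, size = 20),
)
```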
@ -0,0 +1,60 @@
package org.gotson.komga.infrastructure.search

import mu.KotlinLogging
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.index.DirectoryReader
import org.apache.lucene.index.IndexWriter
import org.apache.lucene.index.IndexWriterConfig
import org.apache.lucene.index.Term
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser
import org.apache.lucene.queryparser.classic.ParseException
import org.apache.lucene.queryparser.classic.QueryParser
import org.apache.lucene.search.BooleanClause
import org.apache.lucene.search.BooleanQuery
import org.apache.lucene.search.IndexSearcher
import org.apache.lucene.search.TermQuery
import org.apache.lucene.store.Directory
import org.springframework.stereotype.Component

private val logger = KotlinLogging.logger {}

@Component
class LuceneHelper(
  private val directory: Directory,
  private val analyzer: Analyzer,
) {

  fun getIndexWriter() = IndexWriter(directory, IndexWriterConfig(analyzer))

  fun getIndexReader(): DirectoryReader = DirectoryReader.open(directory)

  fun indexExists(): Boolean = DirectoryReader.indexExists(directory)

  fun searchEntitiesIds(searchTerm: String?, entity: LuceneEntity, size: Int): List<String>? {
    return if (!searchTerm.isNullOrBlank()) {
      try {
        val fieldsQuery = MultiFieldQueryParser(entity.defaultFields, analyzer).apply {
          defaultOperator = QueryParser.Operator.AND
        }.parse(searchTerm)

        val typeQuery = TermQuery(Term(LuceneEntity.TYPE, entity.type))

        val booleanQuery = BooleanQuery.Builder()
          .add(fieldsQuery, BooleanClause.Occur.MUST)
          .add(typeQuery, BooleanClause.Occur.MUST)
          .build()

        getIndexReader().use { index ->
          val searcher = IndexSearcher(index)
          val topDocs = searcher.search(booleanQuery, size)
          topDocs.scoreDocs.map { searcher.doc(it.doc)[entity.id] }
        }
      } catch (e: ParseException) {
        emptyList()
      } catch (e: Exception) {
        logger.error(e) { "Error fetching entities from index" }
        emptyList()
      }
    } else null
  }
}
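A self-contained round trip through `LuceneHelper` can use Lucene's in-memory `ByteBuffersDirectory` instead of the on-disk directory: index one document, then resolve a search term back to stored IDs. This is a sketch, assuming it lives in the same package as the classes above and that "title" is among `LuceneEntity.Book.defaultFields`:

import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.document.StringField
import org.apache.lucene.document.TextField
import org.apache.lucene.store.ByteBuffersDirectory

fun main() {
  val helper = LuceneHelper(ByteBuffersDirectory(), MultiLingualAnalyzer())

  // Closing the writer (via use) commits, so the reader opened later sees the document.
  helper.getIndexWriter().use { writer ->
    writer.addDocument(
      Document().apply {
        add(TextField("title", "Batman and Robin", Field.Store.NO))
        add(StringField(LuceneEntity.TYPE, LuceneEntity.Book.type, Field.Store.NO))
        add(StringField(LuceneEntity.Book.id, "book-1", Field.Store.YES))
      }
    )
  }

  // Prints [book-1], assuming "title" is a default field for Book.
  println(helper.searchEntitiesIds("batman", LuceneEntity.Book, size = 10))
}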
@ -0,0 +1,29 @@
package org.gotson.komga.infrastructure.search

import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.analysis.LowerCaseFilter
import org.apache.lucene.analysis.TokenStream
import org.apache.lucene.analysis.Tokenizer
import org.apache.lucene.analysis.cjk.CJKBigramFilter
import org.apache.lucene.analysis.cjk.CJKWidthFilter
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter
import org.apache.lucene.analysis.standard.StandardTokenizer

class MultiLingualAnalyzer : Analyzer() {
  override fun createComponents(fieldName: String): TokenStreamComponents {
    val source: Tokenizer = StandardTokenizer()
    // Run the width filter before bigramming, as it sometimes combines characters.
    var filter: TokenStream = CJKWidthFilter(source)
    filter = LowerCaseFilter(filter)
    filter = CJKBigramFilter(filter)
    filter = ASCIIFoldingFilter(filter)
    return TokenStreamComponents(source, filter)
  }

  override fun normalize(fieldName: String?, `in`: TokenStream): TokenStream {
    var filter: TokenStream = CJKWidthFilter(`in`)
    filter = LowerCaseFilter(filter)
    filter = ASCIIFoldingFilter(filter)
    return filter
  }
}
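To see what `MultiLingualAnalyzer` actually emits, for example on mixed CJK and Latin input, pull terms off a token stream; a minimal sketch (the expected output matches the `MultilingualAnalyzerTest` cases further down):

import org.apache.lucene.analysis.tokenattributes.CharTermAttribute

// Prints: 不道, 道德, 德公, 公會, vol, 04 - CJK runs are bigrammed, Latin is split and lowercased.
fun main() {
  MultiLingualAnalyzer().tokenStream("text", "[不道德公會]Vol.04").use { ts ->
    val term = ts.addAttribute(CharTermAttribute::class.java)
    ts.reset()
    while (ts.incrementToken()) println(term.toString())
    ts.end()
  }
}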
@ -0,0 +1,116 @@
package org.gotson.komga.infrastructure.search

import mu.KotlinLogging
import org.apache.lucene.document.Document
import org.apache.lucene.index.Term
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.DomainEvent
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
import org.gotson.komga.domain.persistence.ReadListRepository
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
import org.gotson.komga.infrastructure.jms.QUEUE_SSE
import org.gotson.komga.infrastructure.jms.QUEUE_SSE_SELECTOR
import org.gotson.komga.infrastructure.jms.TOPIC_FACTORY
import org.gotson.komga.interfaces.rest.dto.BookDto
import org.gotson.komga.interfaces.rest.dto.SeriesDto
import org.gotson.komga.interfaces.rest.persistence.BookDtoRepository
import org.gotson.komga.interfaces.rest.persistence.SeriesDtoRepository
import org.springframework.data.domain.Page
import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Pageable
import org.springframework.jms.annotation.JmsListener
import org.springframework.stereotype.Component
import kotlin.math.ceil
import kotlin.time.measureTime

private val logger = KotlinLogging.logger {}

@Component
class SearchIndexLifecycle(
  private val collectionRepository: SeriesCollectionRepository,
  private val readListRepository: ReadListRepository,
  private val bookDtoRepository: BookDtoRepository,
  private val seriesDtoRepository: SeriesDtoRepository,
  private val luceneHelper: LuceneHelper,
) {

  fun rebuildIndex() {
    logger.info { "Rebuild all indexes" }

    LuceneEntity.values().forEach {
      when (it) {
        LuceneEntity.Book -> rebuildIndex(it, { p: Pageable -> bookDtoRepository.findAll(BookSearchWithReadProgress(), "unused", p) }, { e: BookDto -> e.toDocument() })
        LuceneEntity.Series -> rebuildIndex(it, { p: Pageable -> seriesDtoRepository.findAll(SeriesSearchWithReadProgress(), "unused", p) }, { e: SeriesDto -> e.toDocument() })
        LuceneEntity.Collection -> rebuildIndex(it, { p: Pageable -> collectionRepository.findAll(pageable = p) }, { e: SeriesCollection -> e.toDocument() })
        LuceneEntity.ReadList -> rebuildIndex(it, { p: Pageable -> readListRepository.findAll(pageable = p) }, { e: ReadList -> e.toDocument() })
      }
    }
  }

  private fun <T> rebuildIndex(entity: LuceneEntity, provider: (Pageable) -> Page<out T>, toDoc: (T) -> Document) {
    logger.info { "Rebuilding index for ${entity.name}" }

    val count = provider(Pageable.ofSize(1)).totalElements
    val batchSize = 5_000
    val pages = ceil(count.toDouble() / batchSize).toInt()
    logger.info { "Number of entities: $count" }

    luceneHelper.getIndexWriter().use { indexWriter ->
      measureTime {
        indexWriter.deleteDocuments(Term(LuceneEntity.TYPE, entity.type))

        (0 until pages).forEach { page ->
          logger.info { "Processing page $page of $batchSize elements" }
          val entityDocs = provider(PageRequest.of(page, batchSize)).content
            .map { toDoc(it) }
          indexWriter.addDocuments(entityDocs)
        }
      }.also { duration ->
        logger.info { "Wrote ${entity.name} index in $duration" }
      }
    }
  }

  @JmsListener(destination = QUEUE_SSE, selector = QUEUE_SSE_SELECTOR, containerFactory = TOPIC_FACTORY)
  fun consumeEvents(event: DomainEvent) {
    when (event) {
      is DomainEvent.SeriesAdded -> seriesDtoRepository.findByIdOrNull(event.series.id, "unused")?.toDocument()?.let { addEntity(it) }
      is DomainEvent.SeriesUpdated -> seriesDtoRepository.findByIdOrNull(event.series.id, "unused")?.toDocument()?.let { updateEntity(LuceneEntity.Series, event.series.id, it) }
      is DomainEvent.SeriesDeleted -> deleteEntity(LuceneEntity.Series, event.series.id)

      is DomainEvent.BookAdded -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.toDocument()?.let { addEntity(it) }
      is DomainEvent.BookUpdated -> bookDtoRepository.findByIdOrNull(event.book.id, "unused")?.toDocument()?.let { updateEntity(LuceneEntity.Book, event.book.id, it) }
      is DomainEvent.BookDeleted -> deleteEntity(LuceneEntity.Book, event.book.id)

      is DomainEvent.ReadListAdded -> readListRepository.findByIdOrNull(event.readList.id)?.toDocument()?.let { addEntity(it) }
      is DomainEvent.ReadListUpdated -> readListRepository.findByIdOrNull(event.readList.id)?.toDocument()?.let { updateEntity(LuceneEntity.ReadList, event.readList.id, it) }
      is DomainEvent.ReadListDeleted -> deleteEntity(LuceneEntity.ReadList, event.readList.id)

      is DomainEvent.CollectionAdded -> collectionRepository.findByIdOrNull(event.collection.id)?.toDocument()?.let { addEntity(it) }
      is DomainEvent.CollectionUpdated -> collectionRepository.findByIdOrNull(event.collection.id)?.toDocument()?.let { updateEntity(LuceneEntity.Collection, event.collection.id, it) }
      is DomainEvent.CollectionDeleted -> deleteEntity(LuceneEntity.Collection, event.collection.id)

      else -> Unit
    }
  }

  private fun addEntity(doc: Document) {
    luceneHelper.getIndexWriter().use { indexWriter ->
      indexWriter.addDocument(doc)
    }
  }

  private fun updateEntity(entity: LuceneEntity, entityId: String, newDoc: Document) {
    luceneHelper.getIndexWriter().use { indexWriter ->
      indexWriter.updateDocument(Term(entity.id, entityId), newDoc)
    }
  }

  private fun deleteEntity(entity: LuceneEntity, entityId: String) {
    luceneHelper.getIndexWriter().use { indexWriter ->
      indexWriter.deleteDocuments(Term(entity.id, entityId))
    }
  }
}
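Each incremental mutation above opens a short-lived `IndexWriter` that commits on close via `use`, and `updateDocument` is an atomic delete-then-add keyed on the ID term, so replaying an event never leaves duplicates behind. A sketch of that upsert behaviour in isolation, with a hypothetical ID and an in-memory directory:

import org.apache.lucene.document.Document
import org.apache.lucene.document.Field
import org.apache.lucene.document.StringField
import org.apache.lucene.index.Term
import org.apache.lucene.store.ByteBuffersDirectory

// Hypothetical ID; the "book_id" field name mirrors what LuceneEntity.Book.id is used for above.
fun main() {
  val helper = LuceneHelper(ByteBuffersDirectory(), MultiLingualAnalyzer())
  val doc = Document().apply { add(StringField("book_id", "book-1", Field.Store.YES)) }

  helper.getIndexWriter().use { it.updateDocument(Term("book_id", "book-1"), doc) }
  // A second update with the same term replaces the first document instead of adding another.
  helper.getIndexWriter().use { it.updateDocument(Term("book_id", "book-1"), doc) }

  helper.getIndexReader().use { reader -> println(reader.numDocs()) } // prints 1
}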
@ -0,0 +1,28 @@
package org.gotson.komga.interfaces.scheduler

import mu.KotlinLogging
import org.gotson.komga.application.tasks.HIGHEST_PRIORITY
import org.gotson.komga.application.tasks.TaskReceiver
import org.gotson.komga.infrastructure.search.LuceneHelper
import org.springframework.boot.context.event.ApplicationReadyEvent
import org.springframework.context.annotation.Profile
import org.springframework.context.event.EventListener
import org.springframework.stereotype.Component

private val logger = KotlinLogging.logger {}

@Profile("!test")
@Component
class SearchIndexController(
  private val luceneHelper: LuceneHelper,
  private val taskReceiver: TaskReceiver,
) {

  @EventListener(ApplicationReadyEvent::class)
  fun createIndexIfNoneExist() {
    if (!luceneHelper.indexExists()) {
      logger.info { "Lucene index not found, trigger rebuild" }
      taskReceiver.rebuildIndex(HIGHEST_PRIORITY)
    }
  }
}
@ -1,3 +1,5 @@
komga:
  database:
    file: ./localdb.sqlite
  lucene:
    data-directory: ./lucene/localdb
@ -14,6 +14,8 @@ komga:
      - "@eaDir"
  database:
    file: ${user.home}/.komga/database.sqlite
  lucene:
    data-directory: ${user.home}/.komga/lucene

spring:
  flyway:
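Both profiles point the index at `komga.lucene.data-directory`, which deployments can override like any other Spring property. A hypothetical override (the path is an example, not a shipped default):

komga:
  lucene:
    data-directory: /data/komga/lucene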
@ -1,9 +1,16 @@
package org.gotson.komga.infrastructure.jooq

import com.ninjasquad.springmockk.MockkBean
import io.mockk.Runs
import io.mockk.every
import io.mockk.just
import org.assertj.core.api.Assertions.assertThat
import org.assertj.core.api.Assertions.assertThatCode
import org.gotson.komga.application.events.EventPublisher
import org.gotson.komga.domain.model.Author
import org.gotson.komga.domain.model.BookSearchWithReadProgress
import org.gotson.komga.domain.model.KomgaUser
import org.gotson.komga.domain.model.Media
import org.gotson.komga.domain.model.ReadProgress
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.makeBook
@ -13,14 +20,17 @@ import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.domain.persistence.KomgaUserRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.MediaRepository
import org.gotson.komga.domain.persistence.ReadProgressRepository
import org.gotson.komga.domain.service.BookLifecycle
import org.gotson.komga.domain.service.KomgaUserLifecycle
import org.gotson.komga.domain.service.LibraryLifecycle
import org.gotson.komga.domain.service.SeriesLifecycle
import org.gotson.komga.infrastructure.search.SearchIndexLifecycle
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
@ -30,6 +40,8 @@ import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Sort
import org.springframework.test.context.junit.jupiter.SpringExtension
import java.net.URL
import java.time.LocalDate
import java.time.LocalDateTime

@ExtendWith(SpringExtension::class)
@SpringBootTest
@ -37,33 +49,46 @@ class BookDtoDaoTest(
  @Autowired private val bookDtoDao: BookDtoDao,
  @Autowired private val bookRepository: BookRepository,
  @Autowired private val bookMetadataRepository: BookMetadataRepository,
  @Autowired private val mediaRepository: MediaRepository,
  @Autowired private val bookLifecycle: BookLifecycle,
  @Autowired private val seriesLifecycle: SeriesLifecycle,
  @Autowired private val libraryRepository: LibraryRepository,
  @Autowired private val libraryLifecycle: LibraryLifecycle,
  @Autowired private val readProgressRepository: ReadProgressRepository,
  @Autowired private val userRepository: KomgaUserRepository,
  @Autowired private val userLifecycle: KomgaUserLifecycle
  @Autowired private val userLifecycle: KomgaUserLifecycle,
  @Autowired private val searchIndexLifecycle: SearchIndexLifecycle,
) {

  private val library = makeLibrary()
  private var series = makeSeries("Series")
  private val user = KomgaUser("user@example.org", "", false)

  @MockkBean
  private lateinit var mockEventPublisher: EventPublisher

  @BeforeAll
  fun setup() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
    libraryRepository.insert(library)
    series = seriesLifecycle.createSeries(series.copy(libraryId = library.id))
    userRepository.insert(user)
  }

  @BeforeEach
  fun resetMocks() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
  }

  @AfterEach
  fun deleteBooks() {
    bookLifecycle.deleteMany(bookRepository.findAll())
    searchIndexLifecycle.rebuildIndex()
  }

  @AfterAll
  fun tearDown() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
    userRepository.findAll().forEach {
      userLifecycle.deleteUser(it)
    }
@ -101,7 +126,7 @@ class BookDtoDaoTest(
    // then
    assertThat(found).hasSize(1)
    assertThat(found.first().readProgress?.completed).isTrue()
    assertThat(found.first().readProgress?.completed).isTrue
    assertThat(found.first().name).isEqualTo("2")
  }
@ -137,7 +162,7 @@ class BookDtoDaoTest(
    // then
    assertThat(found).hasSize(1)
    assertThat(found.first().readProgress?.completed).isFalse()
    assertThat(found.first().readProgress?.completed).isFalse
    assertThat(found.first().name).isEqualTo("1")
  }
@ -307,6 +332,8 @@ class BookDtoDaoTest(
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "batman"),
@ -337,6 +364,8 @@ class BookDtoDaoTest(
      bookMetadataRepository.update(it.copy(title = "Éric le bleu"))
    }

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "eric"),
@ -348,119 +377,353 @@ class BookDtoDaoTest(
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le bleu")
  }
  }

  @Test
  fun `given books when searching by ISBN then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(
      series,
      listOf(
        book1,
        makeBook("Robin", seriesId = series.id, libraryId = library.id),
        makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
  @Test
  fun `given books when searching by ISBN then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(
      series,
      listOf(
        book1,
        makeBook("Robin", seriesId = series.id, libraryId = library.id),
        makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
      )
    )
    )

    bookMetadataRepository.findById(book1.id).let {
      bookMetadataRepository.update(it.copy(isbn = "9782413016878"))
    }
    bookMetadataRepository.findById(book1.id).let {
      bookMetadataRepository.update(it.copy(isbn = "9782413016878"))
    }

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "9782413016878"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content
    searchIndexLifecycle.rebuildIndex()

    // then
    assertThat(found).hasSize(1)
    assertThat(found.map { it.name }).containsExactly("Éric le rouge")
  }

  @Test
  fun `given books when searching by term containing hyphens then results are ordered by rank`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
        makeBook("Another X-Men adventure", seriesId = series.id, libraryId = library.id),
        makeBook("X-Men", seriesId = series.id, libraryId = library.id),
      )
    )

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "x-men"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(2)
    assertThat(found.map { it.name }).containsExactly("X-Men", "Another X-Men adventure")
  }

  @Test
  fun `given books when searching by single letter then results are ordered by rank`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("J", seriesId = series.id, libraryId = library.id),
        makeBook("Adventures of J. J.", seriesId = series.id, libraryId = library.id),
        makeBook("Jackal", seriesId = series.id, libraryId = library.id),
      )
    )

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "j"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(2)
    assertThat(found.map { it.name }).containsExactly("J", "Adventures of J. J.")
  }

  @Test
  fun `when searching by unknown field then empty result are returned and no exception is thrown`() {
    assertThatCode {
      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "publisher:batman"),
        BookSearchWithReadProgress(searchTerm = "9782413016878"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(0)
    }.doesNotThrowAnyException()
  }
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
  }

  @Test
  fun `given books in CJK when searching by CJK term then results are ordered by rank`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("[不道德公會][河添太一 ][東立]Vol.04-搬运", seriesId = series.id, libraryId = library.id, url = URL("file:/file.cbz")),
  @Test
  fun `given books when searching by tags then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(
      series,
      listOf(
        book1,
      )
    )
    )

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "不道德"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content
    bookMetadataRepository.findById(book1.id).let {
      bookMetadataRepository.update(it.copy(tags = setOf("tag1")))
    }

    // then
    assertThat(found).hasSize(1)
    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "tag:tag1"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
  }

  @Test
  fun `given books when searching by authors then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(
      series,
      listOf(
        book1,
      )
    )

    bookMetadataRepository.findById(book1.id).let {
      bookMetadataRepository.update(it.copy(authors = listOf(Author("bob", "writer"))))
    }

    searchIndexLifecycle.rebuildIndex()

    // when
    val foundGeneric = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "author:bob"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content
    val foundByRole = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "writer:bob"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content
    val notFound = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "penciller:bob"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(foundGeneric).hasSize(1)
    assertThat(foundGeneric.map { it.metadata.title }).containsExactly("Éric le rouge")
    assertThat(foundByRole).hasSize(1)
    assertThat(foundByRole.map { it.metadata.title }).containsExactly("Éric le rouge")
    assertThat(notFound).isEmpty()
  }

  @Test
  fun `given books when searching by release year then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(series, listOf(book1))

    bookMetadataRepository.findById(book1.id).let {
      bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(1999, 5, 12)))
    }

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "release_date:1999"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
  }

  @Test
  fun `given books when searching by release year range then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    val book2 = makeBook("Éric le bleu", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(series, listOf(book1, book2))

    bookMetadataRepository.findById(book1.id).let {
      bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(1999, 5, 12)))
    }
    bookMetadataRepository.findById(book2.id).let {
      bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(2005, 5, 12)))
    }

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "release_date:[1990 TO 2010]"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(2)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge", "Éric le bleu")
  }

  @Test
  fun `given books when searching by media status then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
    seriesLifecycle.addBooks(series, listOf(book1))

    mediaRepository.findById(book1.id).let {
      mediaRepository.update(it.copy(status = Media.Status.ERROR))
    }

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "status:error"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
  }

  @Test
  fun `given books when searching by deleted then results are matched`() {
    // given
    val book1 = makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id)
      .copy(deletedDate = LocalDateTime.now())
    seriesLifecycle.addBooks(
      series,
      listOf(
        book1,
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "deleted:true"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("Éric le rouge")
  }

  @Test
  fun `given books with dots in title when searching by title then results are matched`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("S.W.O.R.D.", seriesId = series.id, libraryId = library.id),
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "s.w.o.r.d."),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(1)
    assertThat(found.map { it.metadata.title }).containsExactly("S.W.O.R.D.")
  }

  @Test
  fun `given books when searching with multiple words then results are matched`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("Éric le rouge", seriesId = series.id, libraryId = library.id),
        makeBook("Robin and Batman", seriesId = series.id, libraryId = library.id),
        makeBook("Batman and Robin", seriesId = series.id, libraryId = library.id),
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "batman robin"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(2)
    assertThat(found.map { it.metadata.title }).containsExactlyInAnyOrder("Batman and Robin", "Robin and Batman")
  }

  @Test
  fun `given books when searching by term containing hyphens then results are ordered by rank`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("Batman", seriesId = series.id, libraryId = library.id),
        makeBook("Another X-Men adventure", seriesId = series.id, libraryId = library.id),
        makeBook("X-Men", seriesId = series.id, libraryId = library.id),
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "x-men"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(2)
    assertThat(found.map { it.name }).containsExactly("X-Men", "Another X-Men adventure")
  }

  @Test
  fun `given books when searching by single letter then results are ordered by rank`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("J", seriesId = series.id, libraryId = library.id),
        makeBook("Adventures of J. J.", seriesId = series.id, libraryId = library.id),
        makeBook("Jackal", seriesId = series.id, libraryId = library.id),
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "j"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(2)
    assertThat(found.map { it.name }).containsExactly("J", "Adventures of J. J.")
  }

  @Test
  fun `when searching by unknown field then empty result are returned and no exception is thrown`() {
    assertThatCode {
      // when
      val found = bookDtoDao.findAll(
        BookSearchWithReadProgress(searchTerm = "publisher:batman"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(0)
    }.doesNotThrowAnyException()
  }

  @Test
  fun `given books in CJK when searching by CJK term then results are ordered by rank`() {
    // given
    seriesLifecycle.addBooks(
      series,
      listOf(
        makeBook("[不道德公會][河添太一 ][東立]Vol.04-搬运", seriesId = series.id, libraryId = library.id, url = URL("file:/file.cbz")),
      )
    )

    searchIndexLifecycle.rebuildIndex()

    // when
    val found = bookDtoDao.findAll(
      BookSearchWithReadProgress(searchTerm = "不道德"),
      user.id,
      UnpagedSorted(Sort.by("relevance")),
    ).content

    // then
    assertThat(found).hasSize(1)
  }
  }
}
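The searches exercised above use Lucene's classic query parser syntax: a bare term is matched across the entity's default fields, `field:value` targets one field, and `[a TO b]` is an inclusive range. A standalone sketch of how such terms parse (note that `LuceneHelper` switches the default operator from OR to AND; a bare `QueryParser` keeps OR):

import org.apache.lucene.queryparser.classic.QueryParser

// Assumes the same package as MultiLingualAnalyzer; field names mirror the documents built earlier.
fun main() {
  val parser = QueryParser("title", MultiLingualAnalyzer())
  println(parser.parse("batman robin"))                // title:batman title:robin (OR by default here)
  println(parser.parse("release_date:[1990 TO 2010]")) // inclusive range on a single field
  println(parser.parse("writer:bob"))                  // author roles are indexed as plain text fields
}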
@ -1,30 +1,46 @@
package org.gotson.komga.infrastructure.jooq

import com.ninjasquad.springmockk.MockkBean
import io.mockk.Runs
import io.mockk.every
import io.mockk.just
import org.assertj.core.api.Assertions.assertThat
import org.gotson.komga.application.events.EventPublisher
import org.gotson.komga.domain.model.Author
import org.gotson.komga.domain.model.KomgaUser
import org.gotson.komga.domain.model.ReadProgress
import org.gotson.komga.domain.model.ReadStatus
import org.gotson.komga.domain.model.SeriesMetadata
import org.gotson.komga.domain.model.SeriesSearchWithReadProgress
import org.gotson.komga.domain.model.makeBook
import org.gotson.komga.domain.model.makeLibrary
import org.gotson.komga.domain.model.makeSeries
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.BookRepository
import org.gotson.komga.domain.persistence.KomgaUserRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.ReadProgressRepository
import org.gotson.komga.domain.persistence.SeriesMetadataRepository
import org.gotson.komga.domain.persistence.SeriesRepository
import org.gotson.komga.domain.service.KomgaUserLifecycle
import org.gotson.komga.domain.service.LibraryLifecycle
import org.gotson.komga.domain.service.SeriesLifecycle
import org.gotson.komga.domain.service.SeriesMetadataLifecycle
import org.gotson.komga.infrastructure.search.SearchIndexLifecycle
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Sort
import org.springframework.test.context.junit.jupiter.SpringExtension
import java.time.LocalDate
import java.time.LocalDateTime

@ExtendWith(SpringExtension::class)
@SpringBootTest
@ -32,30 +48,45 @@ class SeriesDtoDaoTest(
  @Autowired private val seriesDtoDao: SeriesDtoDao,
  @Autowired private val bookRepository: BookRepository,
  @Autowired private val seriesRepository: SeriesRepository,
  @Autowired private val seriesMetadataRepository: SeriesMetadataRepository,
  @Autowired private val bookMetadataRepository: BookMetadataRepository,
  @Autowired private val seriesLifecycle: SeriesLifecycle,
  @Autowired private val seriesMetadataLifecycle: SeriesMetadataLifecycle,
  @Autowired private val libraryRepository: LibraryRepository,
  @Autowired private val libraryLifecycle: LibraryLifecycle,
  @Autowired private val readProgressRepository: ReadProgressRepository,
  @Autowired private val userRepository: KomgaUserRepository,
  @Autowired private val userLifecycle: KomgaUserLifecycle
  @Autowired private val userLifecycle: KomgaUserLifecycle,
  @Autowired private val searchIndexLifecycle: SearchIndexLifecycle,
) {

  private val library = makeLibrary()
  private val user = KomgaUser("user@example.org", "", false)

  @MockkBean
  private lateinit var mockEventPublisher: EventPublisher

  @BeforeAll
  fun setup() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
    libraryRepository.insert(library)
    userRepository.insert(user)
  }

  @BeforeEach
  fun resetMocks() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
  }

  @AfterEach
  fun deleteSeries() {
    seriesLifecycle.deleteMany(seriesRepository.findAll())
    searchIndexLifecycle.rebuildIndex()
  }

  @AfterAll
  fun tearDown() {
    every { mockEventPublisher.publishEvent(any()) } just Runs
    userRepository.findAll().forEach {
      userLifecycle.deleteUser(it)
    }
@ -95,148 +126,545 @@ class SeriesDtoDaoTest(
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read series then only read series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
@Nested
|
||||
inner class ReadProgress {
|
||||
@Test
|
||||
fun `given series in various read status when searching for read series then only read series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
|
||||
assertThat(found.first().booksReadCount).isEqualTo(3)
|
||||
assertThat(found.first().name).isEqualTo("2")
|
||||
assertThat(found.first().booksReadCount).isEqualTo(3)
|
||||
assertThat(found.first().name).isEqualTo("2")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for unread series then only unread series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.UNREAD)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
|
||||
assertThat(found.first().booksUnreadCount).isEqualTo(3)
|
||||
assertThat(found.first().name).isEqualTo("3")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for in progress series then only in progress series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.IN_PROGRESS)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(2)
|
||||
|
||||
assertThat(found.first().booksInProgressCount).isEqualTo(3)
|
||||
assertThat(found.first().name).isEqualTo("1")
|
||||
|
||||
assertThat(found.last().booksInProgressCount).isEqualTo(1)
|
||||
assertThat(found.last().name).isEqualTo("4")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read and unread series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.UNREAD)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(2)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("2", "3")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read and in progress series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.IN_PROGRESS)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(3)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "4")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for unread and in progress series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.UNREAD, ReadStatus.IN_PROGRESS)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(3)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "3", "4")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read and unread and in progress series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.IN_PROGRESS, ReadStatus.UNREAD)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(4)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "3", "4")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching without read progress then all series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(4)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "3", "4")
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for unread series then only unread series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
@Nested
|
||||
inner class FullTextSearch {
|
||||
@Test
|
||||
fun `given series when searching by term then results are ordered by rank`() {
|
||||
// given
|
||||
seriesLifecycle.createSeries(makeSeries("The incredible adventures of Batman, the man who is also a bat!", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.UNREAD)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "batman"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
assertThat(found.first().booksUnreadCount).isEqualTo(3)
|
||||
assertThat(found.first().name).isEqualTo("3")
|
||||
}
|
||||
// then
|
||||
assertThat(found).hasSize(3)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman", "Batman and Robin", "The incredible adventures of Batman, the man who is also a bat!")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for in progress series then only in progress series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
@Test
|
||||
fun `given series when searching by publisher then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.IN_PROGRESS)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(publisher = "Vertigo"))
|
||||
}
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(2)
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
assertThat(found.first().booksInProgressCount).isEqualTo(3)
|
||||
assertThat(found.first().name).isEqualTo("1")
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "publisher:vertigo"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
assertThat(found.last().booksInProgressCount).isEqualTo(1)
|
||||
assertThat(found.last().name).isEqualTo("4")
|
||||
}
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read and unread series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
@Test
|
||||
fun `given series when searching by status then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.UNREAD)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(status = SeriesMetadata.Status.HIATUS))
|
||||
}
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(2)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("2", "3")
|
||||
}
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read and in progress series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "status:hiatus"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.IN_PROGRESS)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(3)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "4")
|
||||
}
|
||||
@Test
|
||||
fun `given series when searching by reading direction then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for unread and in progress series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(readingDirection = SeriesMetadata.ReadingDirection.LEFT_TO_RIGHT))
|
||||
}
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.UNREAD, ReadStatus.IN_PROGRESS)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(3)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "3", "4")
|
||||
}
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "reading_direction:left_to_right"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching for read and unread and in progress series then only matching series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(readStatus = listOf(ReadStatus.READ, ReadStatus.IN_PROGRESS, ReadStatus.UNREAD)),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
@Test
|
||||
fun `given series when searching by age rating then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(4)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "3", "4")
|
||||
}
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(ageRating = 12))
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series in various read status when searching without read progress then all series are returned`() {
|
||||
// given
|
||||
setupSeries()
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(),
|
||||
user.id,
|
||||
PageRequest.of(0, 20)
|
||||
).sortedBy { it.name }
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "age_rating:12"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(4)
|
||||
assertThat(found.map { it.name }).containsExactlyInAnyOrder("1", "2", "3", "4")
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series when searching by language then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(language = "en-us"))
|
||||
}
|
||||
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "language:en-us"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series when searching by tags then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
val book = makeBook("Batman 01", seriesId = series.id, libraryId = library.id)
|
||||
seriesLifecycle.addBooks(series, listOf(book))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(tags = setOf("seriestag")))
|
||||
}
|
||||
bookMetadataRepository.findById(book.id).let {
|
||||
bookMetadataRepository.update(it.copy(tags = setOf("booktag")))
|
||||
}
|
||||
|
||||
seriesMetadataLifecycle.aggregateMetadata(series)
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// when
|
||||
val foundByBookTag = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "book_tag:booktag"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
val notFoundByBookTag = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "book_tag:seriestag"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
val foundBySeriesTag = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "series_tag:seriestag"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
val notFoundBySeriesTag = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "series_tag:booktag"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
val foundByTagFromBook = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "tag:booktag"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
val foundByTagFromSeries = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "tag:seriestag"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
// then
|
||||
assertThat(foundByBookTag).hasSize(1)
|
||||
assertThat(foundByBookTag.map { it.metadata.title }).containsExactly("Batman")
|
||||
|
||||
assertThat(notFoundByBookTag).isEmpty()
|
||||
|
||||
assertThat(foundBySeriesTag).hasSize(1)
|
||||
assertThat(foundBySeriesTag.map { it.metadata.title }).containsExactly("Batman")
|
||||
|
||||
assertThat(notFoundBySeriesTag).isEmpty()
|
||||
|
||||
assertThat(foundByTagFromBook).hasSize(1)
|
||||
assertThat(foundByTagFromBook.map { it.metadata.title }).containsExactly("Batman")
|
||||
|
||||
assertThat(foundByTagFromSeries).hasSize(1)
|
||||
assertThat(foundByTagFromSeries.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series when searching by genre then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(genres = setOf("action")))
|
||||
}
|
||||
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "genre:action"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `given series when searching by total book count then results are matched`() {
|
||||
// given
|
||||
val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
|
||||
seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))
|
||||
|
||||
seriesMetadataRepository.findById(series.id).let {
|
||||
seriesMetadataRepository.update(it.copy(totalBookCount = 5))
|
||||
}
|
||||
|
||||
searchIndexLifecycle.rebuildIndex()
|
||||
|
||||
// when
|
||||
val found = seriesDtoDao.findAll(
|
||||
SeriesSearchWithReadProgress(searchTerm = "total_book_count:5"),
|
||||
user.id,
|
||||
UnpagedSorted(Sort.by("relevance")),
|
||||
).content
|
||||
|
||||
// then
|
||||
assertThat(found).hasSize(1)
|
||||
assertThat(found.map { it.metadata.title }).containsExactly("Batman")
|
||||
}
|
||||
|
||||
@Test
|
||||
    fun `given series when searching by book count then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      seriesLifecycle.addBooks(
        series,
        listOf(
          makeBook("Batman 01", seriesId = series.id, libraryId = library.id),
          makeBook("Batman 02", seriesId = series.id, libraryId = library.id)
        )
      )
      seriesLifecycle.sortBooks(series)
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(genres = setOf("action")))
      }

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "book_count:2"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by authors then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      val book = makeBook("Batman 01", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      bookMetadataRepository.findById(book.id).let {
        bookMetadataRepository.update(
          it.copy(
            authors = listOf(
              Author("David", "penciller")
            )
          )
        )
      }

      seriesMetadataLifecycle.aggregateMetadata(series)
      searchIndexLifecycle.rebuildIndex()

      // when
      val foundGeneric = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "author:david"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      val foundByRole = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "penciller:david"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      val notFoundByRole = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "writer:david"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(foundGeneric).hasSize(1)
      assertThat(foundGeneric.map { it.metadata.title }).containsExactly("Batman")

      assertThat(foundByRole).hasSize(1)
      assertThat(foundByRole.map { it.metadata.title }).containsExactly("Batman")

      assertThat(notFoundByRole).isEmpty()
    }

    @Test
    fun `given series when searching by release year then results are matched`() {
      // given
      val series = seriesLifecycle.createSeries(makeSeries("Batman", library.id))
      val book = makeBook("Batman 01", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      bookMetadataRepository.findById(book.id).let {
        bookMetadataRepository.update(it.copy(releaseDate = LocalDate.of(1999, 10, 10)))
      }

      seriesMetadataLifecycle.aggregateMetadata(series)
      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "release_date:1999"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }

    @Test
    fun `given series when searching by deleted then results are matched`() {
      // given
      seriesLifecycle.createSeries(makeSeries("Batman", library.id).copy(deletedDate = LocalDateTime.now()))
      seriesLifecycle.createSeries(makeSeries("Batman and Robin", library.id))

      searchIndexLifecycle.rebuildIndex()

      // when
      val found = seriesDtoDao.findAll(
        SeriesSearchWithReadProgress(searchTerm = "deleted:true"),
        user.id,
        UnpagedSorted(Sort.by("relevance")),
      ).content

      // then
      assertThat(found).hasSize(1)
      assertThat(found.map { it.metadata.title }).containsExactly("Batman")
    }
  }
}
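Note on the query syntax exercised above: scoped terms such as "book_count:2", "author:david" and "deleted:true" follow Lucene's classic "field:value" query syntax, so a single search string can address individual indexed fields. The snippet below is a minimal sketch of parsing such a term with the classic QueryParser from lucene-queryparser; the "title" default field and the standalone setup are assumptions for illustration, not Komga's actual wiring.

import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.queryparser.classic.QueryParser

fun main() {
  // Unscoped words fall back to the default field ("title" here is an assumption).
  val parser = QueryParser("title", StandardAnalyzer())
  // Scoped terms are parsed into per-field queries; multiple terms are OR-ed by default.
  val query = parser.parse("book_count:2 author:david")
  println(query) // book_count:2 author:david
}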
@ -0,0 +1,133 @@
package org.gotson.komga.infrastructure.search

import org.apache.lucene.analysis.Analyzer
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test

class MultilingualAnalyzerTest {

  private val analyzer = MultiLingualAnalyzer()

  private fun Analyzer.getTokens(text: String): List<String> {
    val tokens = mutableListOf<String>()
    tokenStream("text", text).use { stream ->
      stream.reset()
      while (stream.incrementToken()) {
        stream.reflectWith { _, key, value -> if (key == "term") tokens += value.toString() }
      }
      stream.end()
    }
    return tokens
  }

  @Test
  fun `english text`() {
    // given
    val text = "The incredible adventures of Batman, the man who is also a bat!"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("the", "incredible", "adventures", "of", "batman", "the", "man", "who", "is", "also", "a", "bat")
  }

  @Test
  fun accents() {
    // given
    val text = "Éric èl rojo"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactlyInAnyOrder("eric", "el", "rojo")
  }

  @Test
  fun isbn() {
    // given
    val text = "9782413016878"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("9782413016878")
  }

  @Test
  fun `single letter`() {
    // given
    val text = "J"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("j")
  }

  @Test
  fun `chinese mixed`() {
    // given
    val text = "[不道德公會][河添太一 ][東立]Vol.04-搬运"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("不道", "道德", "德公", "公會", "河添", "添太", "太一", "東立", "vol", "04", "搬运")
  }

  @Test
  fun `chinese only`() {
    // given
    val text = "不道德公會河添太一東立搬运"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("不道", "道德", "德公", "公會", "會河", "河添", "添太", "太一", "一東", "東立", "立搬", "搬运")
  }

  @Test
  fun `hiragana only`() {
    // given
    val text = "探偵はもう、死んでいる。"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("探偵", "偵は", "はも", "もう", "死ん", "んで", "でい", "いる")
  }

  @Test
  fun `katakana only`() {
    // given
    val text = "ワンパンマン"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("ワン", "ンパ", "パン", "ンマ", "マン")
  }

  @Test
  fun `korean only`() {
    // given
    val text = "고교생을 환불해 주세요"

    // when
    val tokens = analyzer.getTokens(text)

    // then
    assertThat(tokens).containsExactly("고교", "교생", "생을", "환불", "불해", "주세", "세요")
  }
}
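The expectations above pin down the analyzer's behavior: lowercasing, accent folding ("Éric" to "eric"), single letters and long digit runs kept whole, and overlapping bigrams for Han, hiragana, katakana and hangul text. The following is a minimal sketch of a Lucene analysis chain that approximates that behavior; it is an illustration built from standard Lucene 8.x filters, not necessarily how MultiLingualAnalyzer is actually wired.

import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.analysis.LowerCaseFilter
import org.apache.lucene.analysis.TokenStream
import org.apache.lucene.analysis.cjk.CJKBigramFilter
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter
import org.apache.lucene.analysis.standard.StandardTokenizer

// Illustrative sketch only.
class MultiLingualAnalyzerSketch : Analyzer() {
  override fun createComponents(fieldName: String): TokenStreamComponents {
    val tokenizer = StandardTokenizer()                   // Unicode word-boundary tokenization
    var stream: TokenStream = LowerCaseFilter(tokenizer)  // "Batman" -> "batman"
    stream = ASCIIFoldingFilter(stream)                   // "Éric" -> "eric"
    stream = CJKBigramFilter(stream)                      // CJK runs -> overlapping bigrams
    return TokenStreamComponents(tokenizer, stream)
  }
}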
@ -0,0 +1,323 @@
package org.gotson.komga.infrastructure.search

import com.ninjasquad.springmockk.MockkBean
import io.mockk.every
import io.mockk.slot
import org.assertj.core.api.Assertions.assertThat
import org.gotson.komga.application.events.EventPublisher
import org.gotson.komga.domain.model.DomainEvent
import org.gotson.komga.domain.model.ReadList
import org.gotson.komga.domain.model.SeriesCollection
import org.gotson.komga.domain.model.makeBook
import org.gotson.komga.domain.model.makeLibrary
import org.gotson.komga.domain.model.makeSeries
import org.gotson.komga.domain.persistence.BookMetadataRepository
import org.gotson.komga.domain.persistence.LibraryRepository
import org.gotson.komga.domain.persistence.ReadListRepository
import org.gotson.komga.domain.persistence.SeriesCollectionRepository
import org.gotson.komga.domain.persistence.SeriesMetadataRepository
import org.gotson.komga.domain.persistence.SeriesRepository
import org.gotson.komga.domain.service.BookLifecycle
import org.gotson.komga.domain.service.LibraryLifecycle
import org.gotson.komga.domain.service.ReadListLifecycle
import org.gotson.komga.domain.service.SeriesCollectionLifecycle
import org.gotson.komga.domain.service.SeriesLifecycle
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.data.domain.Pageable
import org.springframework.test.context.junit.jupiter.SpringExtension

@ExtendWith(SpringExtension::class)
@SpringBootTest
class SearchIndexLifecycleTest(
  @Autowired private val libraryRepository: LibraryRepository,
  @Autowired private val libraryLifecycle: LibraryLifecycle,
  @Autowired private val seriesRepository: SeriesRepository,
  @Autowired private val seriesLifecycle: SeriesLifecycle,
  @Autowired private val seriesMetadataRepository: SeriesMetadataRepository,
  @Autowired private val bookMetadataRepository: BookMetadataRepository,
  @Autowired private val bookLifecycle: BookLifecycle,
  @Autowired private val collectionLifecycle: SeriesCollectionLifecycle,
  @Autowired private val collectionRepository: SeriesCollectionRepository,
  @Autowired private val readListLifecycle: ReadListLifecycle,
  @Autowired private val readListRepository: ReadListRepository,
  @Autowired private val searchIndexLifecycle: SearchIndexLifecycle,
  @Autowired private val luceneHelper: LuceneHelper,
) {

  private val library = makeLibrary()

  @MockkBean
  private lateinit var mockEventPublisher: EventPublisher

  @BeforeAll
  fun setup() {
    captureEvents()
    libraryRepository.insert(library)
  }

  @BeforeEach
  fun resetMocks() {
    captureEvents()
  }

  @AfterEach
  fun deleteEntities() {
    seriesLifecycle.deleteMany(seriesRepository.findAll())
    collectionRepository.findAll(pageable = Pageable.unpaged()).forEach {
      collectionLifecycle.deleteCollection(it)
    }
    readListRepository.findAll(pageable = Pageable.unpaged()).forEach {
      readListLifecycle.deleteReadList(it)
    }
  }

  @AfterAll
  fun tearDown() {
    captureEvents()
    libraryRepository.findAll().forEach {
      libraryLifecycle.deleteLibrary(it)
    }
  }

  // Routes published domain events straight into the index lifecycle,
  // so index updates happen synchronously within each test.
  private fun captureEvents() {
    val eventSlot = slot<DomainEvent>()
    every { mockEventPublisher.publishEvent(capture(eventSlot)) } answers {
      searchIndexLifecycle.consumeEvents(eventSlot.captured)
    }
  }

  @Nested
  inner class Book {
    @Test
    fun `given empty index when adding an entity then it is added to the index`() {
      val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
      seriesLifecycle.addBooks(series, listOf(makeBook("book", seriesId = series.id, libraryId = library.id)))

      val found = luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10)

      assertThat(found).isNotNull
      assertThat(found).hasSize(1)
    }

    @Test
    fun `given an entity when updating then it is updated in the index`() {
      val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
      val book = makeBook("book", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))

      luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      bookMetadataRepository.findById(book.id).let {
        bookMetadataRepository.update(it.copy(title = "updated"))
      }
      mockEventPublisher.publishEvent(DomainEvent.BookUpdated(book))

      luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
      luceneHelper.searchEntitiesIds("updated", LuceneEntity.Book, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }
    }

    @Test
    fun `given an entity when deleting then it is removed from the index`() {
      val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))
      val book = makeBook("book", seriesId = series.id, libraryId = library.id)
      seriesLifecycle.addBooks(series, listOf(book))

      luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      bookLifecycle.deleteOne(book)

      luceneHelper.searchEntitiesIds("book", LuceneEntity.Book, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
    }
  }

  @Nested
  inner class Series {
    @Test
    fun `given empty index when adding an entity then it is added to the index`() {
      seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))

      val found = luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10)

      assertThat(found).isNotNull
      assertThat(found).hasSize(1)
    }

    @Test
    fun `given an entity when updating then it is updated in the index`() {
      val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))

      luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      seriesMetadataRepository.findById(series.id).let {
        seriesMetadataRepository.update(it.copy(title = "updated"))
      }
      mockEventPublisher.publishEvent(DomainEvent.SeriesUpdated(series))

      luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
      luceneHelper.searchEntitiesIds("updated", LuceneEntity.Series, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }
    }

    @Test
    fun `given an entity when deleting then it is removed from the index`() {
      val series = seriesLifecycle.createSeries(makeSeries("Series", libraryId = library.id))

      luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      seriesLifecycle.deleteMany(listOf(series))

      luceneHelper.searchEntitiesIds("series", LuceneEntity.Series, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
    }
  }

  @Nested
  inner class Collection {
    @Test
    fun `given empty index when adding an entity then it is added to the index`() {
      val collection = SeriesCollection("collection")
      collectionLifecycle.addCollection(collection)

      val found = luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10)

      assertThat(found).isNotNull
      assertThat(found).hasSize(1)
    }

    @Test
    fun `given an entity when updating then it is updated in the index`() {
      val collection = SeriesCollection("collection")
      collectionLifecycle.addCollection(collection)

      luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      collectionRepository.findByIdOrNull(collection.id)?.let {
        collectionRepository.update(it.copy(name = "updated"))
      }
      mockEventPublisher.publishEvent(DomainEvent.CollectionUpdated(collection))

      luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
      luceneHelper.searchEntitiesIds("updated", LuceneEntity.Collection, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }
    }

    @Test
    fun `given an entity when deleting then it is removed from the index`() {
      val collection = SeriesCollection("collection")
      collectionLifecycle.addCollection(collection)

      luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      collectionLifecycle.deleteCollection(collection)

      luceneHelper.searchEntitiesIds("collection", LuceneEntity.Collection, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
    }
  }

  @Nested
  inner class ReadList {
    @Test
    fun `given empty index when adding an entity then it is added to the index`() {
      // fully qualified: the domain model class is shadowed by this inner class name
      val readList = org.gotson.komga.domain.model.ReadList("readlist")
      readListLifecycle.addReadList(readList)

      val found = luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10)

      assertThat(found).isNotNull
      assertThat(found).hasSize(1)
    }

    @Test
    fun `given an entity when updating then it is updated in the index`() {
      val readList = org.gotson.komga.domain.model.ReadList("readlist")
      readListLifecycle.addReadList(readList)

      luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      readListRepository.findByIdOrNull(readList.id)?.let {
        readListRepository.update(it.copy(name = "updated"))
      }
      mockEventPublisher.publishEvent(DomainEvent.ReadListUpdated(readList))

      luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
      luceneHelper.searchEntitiesIds("updated", LuceneEntity.ReadList, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }
    }

    @Test
    fun `given an entity when deleting then it is removed from the index`() {
      val readList = org.gotson.komga.domain.model.ReadList("readlist")
      readListLifecycle.addReadList(readList)

      luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).hasSize(1)
      }

      readListLifecycle.deleteReadList(readList)

      luceneHelper.searchEntitiesIds("readlist", LuceneEntity.ReadList, 10).let { found ->
        assertThat(found).isNotNull
        assertThat(found).isEmpty()
      }
    }
  }
}
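The tests above drive LuceneHelper.searchEntitiesIds, which takes a search term, an entity type, and a result limit, and returns the matching entity ids. Below is a hypothetical sketch of such a helper, assuming each indexed document stores its id in an "ID" field and its kind in a "type" field; every name beyond the call signature seen above is an assumption, not Komga's implementation.

import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.queryparser.classic.QueryParser
import org.apache.lucene.search.IndexSearcher

// Hypothetical sketch, not Komga's actual code.
fun searchEntitiesIds(searcher: IndexSearcher, searchTerm: String, type: String, limit: Int): List<String> {
  // Constrain the user query to documents of the requested entity type ("book", "series", ...).
  val query = QueryParser("title", StandardAnalyzer()).parse("($searchTerm) AND type:$type")
  // Map the top hits back to their stored entity ids.
  return searcher.search(query, limit).scoreDocs.map { searcher.doc(it.doc).get("ID") }
}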