Fix/backup with duplicated chapters failure (#1269)

* Extract logic to restore manga chapters into function

* Extract logic to restore manga categories into function

* Extract logic to restore manga trackers into function

* Handle duplicated chapters in backup

In case a backup contained duplicated chapters for a manga, the manga failed to restore, because the ChapterTable has a unique constraint that prevents multiple chapters with the same "url" and "mangaId".
This commit is contained in:
schroda
2025-02-15 01:02:02 +01:00
committed by GitHub
parent 37f57c0c55
commit 733ba16af2
2 changed files with 128 additions and 62 deletions

View File

@@ -48,6 +48,7 @@ import suwayomi.tachidesk.manga.model.dataclass.TrackRecordDataClass
import suwayomi.tachidesk.manga.model.table.CategoryTable
import suwayomi.tachidesk.manga.model.table.ChapterTable
import suwayomi.tachidesk.manga.model.table.MangaTable
import suwayomi.tachidesk.server.database.dbTransaction
import java.io.InputStream
import java.util.Date
import java.util.Timer
@@ -56,6 +57,11 @@ import java.util.concurrent.TimeUnit
import kotlin.math.max
import suwayomi.tachidesk.manga.impl.track.Track as Tracker
// Whether the manga being restored from a backup has to be newly created (NEW) or
// already exists in the database (EXISTING); selects between insert and merge logic
// when restoring its chapter data.
enum class RestoreMode {
    NEW,
    EXISTING,
}
object ProtoBackupImport : ProtoBackupBase() {
private val scope = CoroutineScope(SupervisorJob() + Dispatchers.Default)
@@ -312,32 +318,10 @@ object ProtoBackupImport : ProtoBackupBase() {
clearThumbnail(mangaId)
// insert chapter data
val chaptersLength = chapters.size
ChapterTable.batchInsert(chapters) { chapter ->
this[ChapterTable.url] = chapter.url
this[ChapterTable.name] = chapter.name
if (chapter.date_upload == 0L) {
this[ChapterTable.date_upload] = chapter.date_fetch
} else {
this[ChapterTable.date_upload] = chapter.date_upload
}
this[ChapterTable.chapter_number] = chapter.chapter_number
this[ChapterTable.scanlator] = chapter.scanlator
this[ChapterTable.sourceOrder] = chaptersLength - chapter.source_order
this[ChapterTable.manga] = mangaId
this[ChapterTable.isRead] = chapter.read
this[ChapterTable.lastPageRead] = chapter.last_page_read
this[ChapterTable.isBookmarked] = chapter.bookmark
this[ChapterTable.fetchedAt] = TimeUnit.MILLISECONDS.toSeconds(chapter.date_fetch)
}
restoreMangaChapterData(mangaId, RestoreMode.NEW, chapters)
// insert categories
categories.forEach { backupCategoryOrder ->
CategoryManga.addMangaToCategory(mangaId, categoryMapping[backupCategoryOrder]!!)
}
restoreMangaCategoryData(mangaId, categories, categoryMapping)
mangaId
}
@@ -363,49 +347,100 @@ object ProtoBackupImport : ProtoBackupBase() {
}
// merge chapter data
val chaptersLength = chapters.size
val dbChapters = ChapterTable.selectAll().where { ChapterTable.manga eq mangaId }
chapters.forEach { chapter ->
val dbChapter = dbChapters.find { it[ChapterTable.url] == chapter.url }
if (dbChapter == null) {
ChapterTable.insert {
it[url] = chapter.url
it[name] = chapter.name
if (chapter.date_upload == 0L) {
it[date_upload] = chapter.date_fetch
} else {
it[date_upload] = chapter.date_upload
}
it[chapter_number] = chapter.chapter_number
it[scanlator] = chapter.scanlator
it[sourceOrder] = chaptersLength - chapter.source_order
it[ChapterTable.manga] = mangaId
it[isRead] = chapter.read
it[lastPageRead] = chapter.last_page_read
it[isBookmarked] = chapter.bookmark
}
} else {
ChapterTable.update({ (ChapterTable.url eq dbChapter[ChapterTable.url]) and (ChapterTable.manga eq mangaId) }) {
it[isRead] = chapter.read || dbChapter[isRead]
it[lastPageRead] = max(chapter.last_page_read, dbChapter[lastPageRead])
it[isBookmarked] = chapter.bookmark || dbChapter[isBookmarked]
}
}
}
restoreMangaChapterData(mangaId, RestoreMode.EXISTING, chapters)
// merge categories
categories.forEach { backupCategoryOrder ->
CategoryManga.addMangaToCategory(mangaId, categoryMapping[backupCategoryOrder]!!)
}
restoreMangaCategoryData(mangaId, categories, categoryMapping)
mangaId
}
}
restoreMangaTrackerData(mangaId, tracks)
// TODO: insert/merge history
}
/**
 * Writes the chapters of a backed up manga to the database.
 *
 * A backup can contain duplicated chapters (same "url") for a manga; since the
 * ChapterTable has a unique constraint on "url" + "mangaId", duplicates are dropped
 * before touching the database.
 *
 * @param mangaId id of the manga the chapters belong to
 * @param restoreMode [RestoreMode.NEW] bulk-inserts all chapters for a freshly created
 *                    manga; [RestoreMode.EXISTING] merges the backup chapters into the
 *                    chapters already stored for the manga.
 * @param chapters the chapters from the backup, possibly containing duplicates
 */
private fun restoreMangaChapterData(
    mangaId: Int,
    restoreMode: RestoreMode,
    chapters: List<Chapter>,
) = dbTransaction {
    val uniqueChapters = chapters.distinctBy { it.url }
    val chaptersLength = uniqueChapters.size

    if (restoreMode == RestoreMode.NEW) {
        // Fresh manga: nothing to merge with, insert everything in one batch statement.
        ChapterTable.batchInsert(uniqueChapters) { chapter ->
            this[ChapterTable.url] = chapter.url
            this[ChapterTable.name] = chapter.name
            if (chapter.date_upload == 0L) {
                // Backups created before an upload date was known fall back to the fetch date.
                this[ChapterTable.date_upload] = chapter.date_fetch
            } else {
                this[ChapterTable.date_upload] = chapter.date_upload
            }
            this[ChapterTable.chapter_number] = chapter.chapter_number
            this[ChapterTable.scanlator] = chapter.scanlator
            this[ChapterTable.sourceOrder] = chaptersLength - chapter.source_order
            this[ChapterTable.manga] = mangaId
            this[ChapterTable.isRead] = chapter.read
            this[ChapterTable.lastPageRead] = chapter.last_page_read
            this[ChapterTable.isBookmarked] = chapter.bookmark
            this[ChapterTable.fetchedAt] = TimeUnit.MILLISECONDS.toSeconds(chapter.date_fetch)
        }
    } else {
        // Merge backup chapters into the chapters already present for this manga.
        // Previously this loop also ran after the NEW bulk-insert above, issuing a redundant,
        // effectively no-op UPDATE (OR/max of a row with itself) for every freshly inserted
        // chapter; it is now limited to RestoreMode.EXISTING.
        val dbChapters = ChapterTable.selectAll().where { ChapterTable.manga eq mangaId }
        uniqueChapters.forEach { chapter ->
            val dbChapter = dbChapters.find { it[ChapterTable.url] == chapter.url }
            if (dbChapter == null) {
                ChapterTable.insert {
                    it[url] = chapter.url
                    it[name] = chapter.name
                    if (chapter.date_upload == 0L) {
                        it[date_upload] = chapter.date_fetch
                    } else {
                        it[date_upload] = chapter.date_upload
                    }
                    it[chapter_number] = chapter.chapter_number
                    it[scanlator] = chapter.scanlator
                    it[sourceOrder] = chaptersLength - chapter.source_order
                    it[ChapterTable.manga] = mangaId
                    it[isRead] = chapter.read
                    it[lastPageRead] = chapter.last_page_read
                    it[isBookmarked] = chapter.bookmark
                    // NOTE(review): unlike the NEW path, fetchedAt is not set here — confirm intended.
                }
            } else {
                // Chapter exists in both backup and database: keep the "most progressed" state.
                ChapterTable.update({ (ChapterTable.url eq dbChapter[ChapterTable.url]) and (ChapterTable.manga eq mangaId) }) {
                    it[isRead] = chapter.read || dbChapter[isRead]
                    it[lastPageRead] = max(chapter.last_page_read, dbChapter[lastPageRead])
                    it[isBookmarked] = chapter.bookmark || dbChapter[isBookmarked]
                }
            }
        }
    }
}
/**
 * Links the restored manga to each of its backed up categories.
 *
 * @param mangaId id of the manga being restored
 * @param categories the category orders stored in the backup for this manga
 * @param categoryMapping maps a backup category order to the id of the corresponding
 *                        category in the database
 */
private fun restoreMangaCategoryData(
    mangaId: Int,
    categories: List<Int>,
    categoryMapping: Map<Int, Int>,
) {
    for (backupCategoryOrder in categories) {
        // The mapping is built from the same backup, so a missing entry is a programming error.
        val categoryId = categoryMapping[backupCategoryOrder]!!
        CategoryManga.addMangaToCategory(mangaId, categoryId)
    }
}
private fun restoreMangaTrackerData(
mangaId: Int,
tracks: List<BackupTracking>,
) {
val dbTrackRecordsByTrackerId =
Tracker
.getTrackRecordsByMangaId(mangaId)
@@ -440,8 +475,6 @@ object ProtoBackupImport : ProtoBackupBase() {
existingTracks.forEach(Tracker::updateTrackRecord)
newTracks.forEach(Tracker::insertTrackRecord)
// TODO: insert/merge history
}
// Normalizes a track record for value comparison by zeroing the database-specific ids,
// so a record coming from the backup can be matched against an existing database record
// regardless of which row/manga it is attached to.
private fun TrackRecordDataClass.forComparison() = this.copy(id = 0, mangaId = 0)

View File

@@ -0,0 +1,33 @@
package suwayomi.tachidesk.server.database
import org.jetbrains.exposed.sql.Transaction
import org.jetbrains.exposed.sql.transactions.TransactionManager
import org.jetbrains.exposed.sql.transactions.transaction
/**
 * Runs [block] inside the currently active transaction, or opens a new transaction when
 * none is active.
 *
 * Any rollback or exception in the inner transaction will be propagated to the parent transaction.
 */
fun <T> dbTransaction(block: Transaction.() -> T): T =
    when (val active = TransactionManager.currentOrNull()) {
        null -> transaction { block() }
        else -> active.block()
    }
/**
 * Creates a nested transaction.
 *
 * Any rollback or exception will only roll back the inner (nested) transaction, leaving the parent transaction unaffected.
 *
 * Only works in case "useNestedTransactions" is enabled; otherwise an [IllegalStateException] is thrown.
 */
fun <T> nestedDbTransaction(block: Transaction.() -> T): T =
    transaction {
        // NOTE(review): the flag can only be checked here, inside the transaction, because `db`
        // is a property of the Transaction receiver — confirm that failing this check after the
        // transaction was already opened has no unwanted side effects.
        check(db.useNestedTransactions) { "Nested transactions are not enabled." }
        block()
    }