Mirror of https://github.com/InsanusMokrassar/MicroUtils.git, synced 2025-09-19 15:29:24 +00:00
Compare commits
26 Commits
* 3437f4c712
* 618f2dcd79
* 6df8ad3095
* eda6221288
* a9859f6a0d
* 0db88bac25
* daa3d9c0dd
* b343b33594
* 46e435a448
* 7fe62b4ffa
* 9c94348a15
* bac256e93e
* 49f59aa129
* 800dab5be0
* b9977527b2
* c216dba69d
* d4148d52e3
* 2006a8cdd0
* feb52ecbd1
* 42909c3b7a
* 706a787163
* 1bc14bded6
* f00cb81db1
* ddb8e1efb4
* 7a650f5c2f
* fc6f5ae2ee
CHANGELOG.md (23)
@@ -1,5 +1,28 @@
# Changelog

## 0.25.1

* `Coroutines`:
  * Add `SortedMapLikeBinaryTreeNode`
* `Pagination`:
  * `Compose`:
    * One more rework of `InfinityPagedComponent` and `PagedComponent`

## 0.25.0

* `Repos`:
  * `Cache`:
    * All cache repos no longer have `open` vals, to avoid runtime collisions

## 0.24.9

* `Pagination`:
  * Give the alternative constructor parameter `size` of `PaginationResult` a default value
  * Add `Pagination.previousPage` extension
  * `Compose`:
    * Rework of `InfinityPagedComponentContext`
    * Rework of `PagedComponent`

## 0.24.8

* `Versions`:
@@ -1,2 +0,0 @@
actual val AllowDeepInsertOnWorksTest: Boolean
    get() = true
@@ -1,7 +1,6 @@
package dev.inmo.micro_utils.coroutines.collections

import dev.inmo.micro_utils.coroutines.SmartRWLocker
import dev.inmo.micro_utils.coroutines.waitReadRelease
import dev.inmo.micro_utils.coroutines.withReadAcquire
import dev.inmo.micro_utils.coroutines.withWriteLock
import kotlinx.coroutines.job
@@ -93,7 +92,7 @@ class SortedBinaryTreeNode<T>(
 * This process continues until the function finds a place to put the [SortedBinaryTreeNode] with the data or
 * a [SortedBinaryTreeNode] with [SortedBinaryTreeNode.data] equal to [newData] is found
 */
private suspend fun <T> SortedBinaryTreeNode<T>.addSubNode(
private suspend fun <T> SortedBinaryTreeNode<T>.upsertSubNode(
    subNode: SortedBinaryTreeNode<T>,
    skipLockers: Set<SmartRWLocker> = emptySet()
): SortedBinaryTreeNode<T> {
@@ -149,7 +148,7 @@ private suspend fun <T> SortedBinaryTreeNode<T>.addSubNode(
 * [SortedBinaryTreeNode] with [SortedBinaryTreeNode.data] equal to [newData] is found
 */
suspend fun <T> SortedBinaryTreeNode<T>.addSubNode(newData: T): SortedBinaryTreeNode<T> {
    return addSubNode(
    return upsertSubNode(
        SortedBinaryTreeNode(newData, comparator)
    )
}
@@ -198,8 +197,8 @@ suspend fun <T> SortedBinaryTreeNode<T>.findParentNode(data: T): SortedBinaryTre
 */
suspend fun <T> SortedBinaryTreeNode<T>.removeSubNode(data: T): Pair<SortedBinaryTreeNode<T>, SortedBinaryTreeNode<T>>? {
    val onFoundToRemoveCallback: suspend SortedBinaryTreeNode<T>.(left: SortedBinaryTreeNode<T>?, right: SortedBinaryTreeNode<T>?) -> Unit = { left, right ->
        left ?.also { leftNode -> addSubNode(leftNode, setOf(locker)) }
        right ?.also { rightNode -> addSubNode(rightNode, setOf(locker)) }
        left ?.also { leftNode -> upsertSubNode(leftNode, setOf(locker)) }
        right ?.also { rightNode -> upsertSubNode(rightNode, setOf(locker)) }
    }
    while (coroutineContext.job.isActive) {
        val foundParentNode = findParentNode(data) ?: return null
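The rename is internal only: the public `addSubNode` extension keeps its signature and now delegates to the private `upsertSubNode`, which `removeSubNode` also uses to reattach detached subtrees. A minimal sketch of the unchanged public API (the `runBlocking` wrapper is an illustrative assumption):

import dev.inmo.micro_utils.coroutines.collections.SortedBinaryTreeNode
import dev.inmo.micro_utils.coroutines.collections.addSubNode
import dev.inmo.micro_utils.coroutines.collections.findNode
import dev.inmo.micro_utils.coroutines.collections.removeSubNode
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
    val root = SortedBinaryTreeNode(0)   // comparator is derived from Comparable
    root.addSubNode(10)                  // public API is unchanged by the rename
    root.addSubNode(-5)
    println(root.findNode(10)?.data)     // 10
    root.removeSubNode(-5)               // reattachment happens through upsertSubNode internally
    Unit
}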
@@ -0,0 +1,401 @@
|
||||
package dev.inmo.micro_utils.coroutines.collections
|
||||
|
||||
import dev.inmo.micro_utils.coroutines.SmartRWLocker
|
||||
import dev.inmo.micro_utils.coroutines.withReadAcquire
|
||||
import dev.inmo.micro_utils.coroutines.withWriteLock
|
||||
import kotlinx.coroutines.job
|
||||
import kotlinx.serialization.Serializable
|
||||
import kotlin.coroutines.coroutineContext
|
||||
|
||||
/**
|
||||
* Creates a simple [Comparator] which uses [compareTo] of [T] for both objects
|
||||
*/
|
||||
private fun <T : Comparable<C>, C : T> T.createComparator() = Comparator<C> { o1, o2 -> o1.compareTo(o2) }
|
||||
|
||||
@Serializable
|
||||
class SortedMapLikeBinaryTreeNode<K, V>(
|
||||
val key: K,
|
||||
val value: V,
|
||||
internal val comparator: Comparator<K>,
|
||||
) : Iterable<SortedMapLikeBinaryTreeNode<K, V>> {
|
||||
internal var leftNode: SortedMapLikeBinaryTreeNode<K, V>? = null
|
||||
internal var rightNode: SortedMapLikeBinaryTreeNode<K, V>? = null
|
||||
internal val locker: SmartRWLocker by lazy {
|
||||
SmartRWLocker()
|
||||
}
|
||||
|
||||
suspend fun getLeftNode() = locker.withReadAcquire {
|
||||
leftNode
|
||||
}
|
||||
|
||||
suspend fun getRightNode() = locker.withReadAcquire {
|
||||
rightNode
|
||||
}
|
||||
|
||||
suspend fun getLeftKey() = getLeftNode() ?.key
|
||||
suspend fun getLeftValue() = getLeftNode() ?.value
|
||||
|
||||
suspend fun getRightKey() = getRightNode() ?.key
|
||||
suspend fun getRightValue() = getRightNode() ?.value
|
||||
|
||||
override fun equals(other: Any?): Boolean {
|
||||
return other === this || (other is SortedMapLikeBinaryTreeNode<*, *> && other.key == key && other.rightNode == rightNode && other.leftNode == leftNode)
|
||||
}
|
||||
|
||||
override fun hashCode(): Int {
|
||||
return key.hashCode() * 31 + rightNode.hashCode() + leftNode.hashCode()
|
||||
}
|
||||
|
||||
suspend fun size(): Int {
|
||||
return locker.withReadAcquire {
|
||||
1 + (leftNode ?.size() ?: 0) + (rightNode ?.size() ?: 0)
|
||||
}
|
||||
}
|
||||
|
||||
/**
* This [Iterator] walks the nodes in ascending order, starting from the
* [dev.inmo.micro_utils.coroutines.collections.SortedMapLikeBinaryTreeNode] receiver. Due to the non-suspending
* nature of the [iterator] builder, this [Iterator] does **NOT** guarantee consistent content across iterations:
* the tree may change while the iteration is in progress
*/
|
||||
override fun iterator(): Iterator<SortedMapLikeBinaryTreeNode<K, V>> = iterator {
|
||||
leftNode ?.let {
|
||||
it.iterator().forEach { yield(it) }
|
||||
}
|
||||
yield(this@SortedMapLikeBinaryTreeNode)
|
||||
rightNode ?.let {
|
||||
it.iterator().forEach { yield(it) }
|
||||
}
|
||||
}
|
||||
|
||||
override fun toString(): String {
|
||||
return "$key($leftNode;$rightNode)"
|
||||
}
|
||||
|
||||
companion object {
|
||||
operator fun <K : Comparable<K>, V> invoke(
|
||||
key: K,
|
||||
value: V
|
||||
) = SortedMapLikeBinaryTreeNode(
|
||||
key,
|
||||
value,
|
||||
key.createComparator()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Adds the subnode to the tree if there is no node with the same key yet
*
* * If [subNode]'s key is greater than [SortedMapLikeBinaryTreeNode.key] of the currently checked node,
* [SortedMapLikeBinaryTreeNode.rightNode] will be used
* * If [subNode]'s key is equal to [SortedMapLikeBinaryTreeNode.key] of the currently
* checked node, that node will be replaced (see [replaceMode]) and the replacement returned
* * If [subNode]'s key is less than [SortedMapLikeBinaryTreeNode.key] of the currently
* checked node, [SortedMapLikeBinaryTreeNode.leftNode] will be used
*
* This process continues until the function finds a place to put the [SortedMapLikeBinaryTreeNode] or
* a [SortedMapLikeBinaryTreeNode] with the same [SortedMapLikeBinaryTreeNode.key] is found
*
* @param replaceMode If true, [subNode] fully replaces an existing node with the same key; if false, only the value
* is replaced and the existing child links are kept
*/
|
||||
private suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.upsertSubNode(
|
||||
subNode: SortedMapLikeBinaryTreeNode<K, V>,
|
||||
skipLockers: Set<SmartRWLocker> = emptySet(),
|
||||
replaceMode: Boolean
|
||||
): SortedMapLikeBinaryTreeNode<K, V> {
|
||||
var currentlyChecking = this
|
||||
var latestParent: SortedMapLikeBinaryTreeNode<K, V>? = null
|
||||
val lockedLockers = mutableSetOf<SmartRWLocker>()
|
||||
try {
|
||||
while (coroutineContext.job.isActive) {
|
||||
if (currentlyChecking.locker !in lockedLockers && currentlyChecking.locker !in skipLockers) {
|
||||
currentlyChecking.locker.lockWrite()
|
||||
lockedLockers.add(currentlyChecking.locker)
|
||||
}
|
||||
val left = currentlyChecking.leftNode
|
||||
val right = currentlyChecking.rightNode
|
||||
val comparingResult = currentlyChecking.comparator.compare(subNode.key, currentlyChecking.key)
|
||||
val isGreater = comparingResult > 0
|
||||
when {
|
||||
comparingResult == 0 -> {
|
||||
val resultNode = if (replaceMode) {
|
||||
subNode
|
||||
} else {
|
||||
val newNode = SortedMapLikeBinaryTreeNode(
|
||||
subNode.key,
|
||||
subNode.value,
|
||||
currentlyChecking.comparator,
|
||||
)
|
||||
newNode.leftNode = currentlyChecking.leftNode
|
||||
newNode.rightNode = currentlyChecking.rightNode
|
||||
newNode
|
||||
}
|
||||
|
||||
latestParent ?.let {
|
||||
when {
|
||||
it.leftNode === currentlyChecking -> it.leftNode = resultNode
|
||||
it.rightNode === currentlyChecking -> it.rightNode = resultNode
|
||||
}
|
||||
}
|
||||
|
||||
return resultNode
|
||||
}
|
||||
isGreater && right == null -> {
|
||||
currentlyChecking.rightNode = subNode
|
||||
return subNode
|
||||
}
|
||||
isGreater && right != null -> {
|
||||
latestParent = currentlyChecking
|
||||
currentlyChecking = right
|
||||
}
|
||||
left == null -> {
|
||||
currentlyChecking.leftNode = subNode
|
||||
return subNode
|
||||
}
|
||||
else -> {
|
||||
latestParent = currentlyChecking
|
||||
currentlyChecking = left
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
lockedLockers.forEach {
|
||||
runCatching { it.unlockWrite() }
|
||||
}
|
||||
}
|
||||
error("Unable to add node")
|
||||
}
|
||||
|
||||
/**
* Adds a subnode to the tree if there is no node with [key] yet
*
* * If [key] is greater than [SortedMapLikeBinaryTreeNode.key] of the currently checked node,
* [SortedMapLikeBinaryTreeNode.rightNode] will be used
* * If [key] is equal to [SortedMapLikeBinaryTreeNode.key] of the currently
* checked node, the value of that node will be replaced and the resulting node returned
* * If [key] is less than [SortedMapLikeBinaryTreeNode.key] of the currently
* checked node, [SortedMapLikeBinaryTreeNode.leftNode] will be used
*
* This process continues until the function finds a place to put the [SortedMapLikeBinaryTreeNode] with the data or
* a [SortedMapLikeBinaryTreeNode] with [SortedMapLikeBinaryTreeNode.key] equal to [key] is found
*/
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.upsertSubNode(
|
||||
key: K,
|
||||
value: V
|
||||
): SortedMapLikeBinaryTreeNode<K, V> {
|
||||
return upsertSubNode(
|
||||
SortedMapLikeBinaryTreeNode(key, value, comparator),
|
||||
replaceMode = false
|
||||
)
|
||||
}
|
||||
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.findParentNode(data: K): SortedMapLikeBinaryTreeNode<K, V>? {
|
||||
var currentParent: SortedMapLikeBinaryTreeNode<K, V>? = null
|
||||
var currentlyChecking: SortedMapLikeBinaryTreeNode<K, V>? = this
|
||||
val lockedLockers = mutableSetOf<SmartRWLocker>()
|
||||
try {
|
||||
while (coroutineContext.job.isActive) {
|
||||
if (currentlyChecking == null) {
|
||||
return null
|
||||
}
|
||||
if (currentlyChecking.locker !in lockedLockers) {
|
||||
currentlyChecking.locker.acquireRead()
|
||||
lockedLockers.add(currentlyChecking.locker)
|
||||
}
|
||||
val comparingResult = currentlyChecking.comparator.compare(data, currentlyChecking.key)
|
||||
when {
|
||||
comparingResult > 0 -> {
|
||||
currentParent = currentlyChecking
|
||||
currentlyChecking = currentlyChecking.rightNode
|
||||
continue
|
||||
}
|
||||
comparingResult < 0 -> {
|
||||
currentParent = currentlyChecking
|
||||
currentlyChecking = currentlyChecking.leftNode
|
||||
continue
|
||||
}
|
||||
else -> return currentParent
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
lockedLockers.forEach {
|
||||
runCatching { it.releaseRead() }
|
||||
}
|
||||
}
|
||||
error("Unable to find node")
|
||||
}
|
||||
|
||||
/**
* Removes (detaches) a node from the tree, starting the search at [this] [SortedMapLikeBinaryTreeNode]
*
* @return If the data was found, a [Pair] where [Pair.first] is the parent node from which [Pair.second] has been detached;
* null otherwise
*/
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.removeSubNode(data: K): Pair<SortedMapLikeBinaryTreeNode<K, V>, SortedMapLikeBinaryTreeNode<K, V>>? {
|
||||
val onFoundToRemoveCallback: suspend SortedMapLikeBinaryTreeNode<K, V>.(left: SortedMapLikeBinaryTreeNode<K, V>?, right: SortedMapLikeBinaryTreeNode<K, V>?) -> Unit = { left, right ->
|
||||
left ?.also { leftNode -> upsertSubNode(leftNode, setOf(locker), replaceMode = true) }
|
||||
right ?.also { rightNode -> upsertSubNode(rightNode, setOf(locker), replaceMode = true) }
|
||||
}
|
||||
while (coroutineContext.job.isActive) {
|
||||
val foundParentNode = findParentNode(data) ?: return null
|
||||
foundParentNode.locker.withWriteLock {
|
||||
val left = foundParentNode.leftNode
|
||||
val right = foundParentNode.rightNode
|
||||
when {
|
||||
left != null && left.comparator.compare(data, left.key) == 0 -> {
|
||||
foundParentNode.leftNode = null
|
||||
foundParentNode.onFoundToRemoveCallback(left.leftNode, left.rightNode)
|
||||
return foundParentNode to left
|
||||
}
|
||||
right != null && right.comparator.compare(data, right.key) == 0 -> {
|
||||
foundParentNode.rightNode = null
|
||||
foundParentNode.onFoundToRemoveCallback(right.leftNode, right.rightNode)
|
||||
return foundParentNode to right
|
||||
}
|
||||
else -> {
|
||||
return@withWriteLock // data has been changed, new search required
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
error("Unable to remove node")
|
||||
}
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.findNode(key: K): SortedMapLikeBinaryTreeNode<K, V>? {
|
||||
var currentlyChecking: SortedMapLikeBinaryTreeNode<K, V>? = this
|
||||
val lockedLockers = mutableSetOf<SmartRWLocker>()
|
||||
try {
|
||||
while (coroutineContext.job.isActive) {
|
||||
if (currentlyChecking == null) {
|
||||
return null
|
||||
}
|
||||
if (currentlyChecking.locker !in lockedLockers) {
|
||||
currentlyChecking.locker.acquireRead()
|
||||
lockedLockers.add(currentlyChecking.locker)
|
||||
}
|
||||
val comparingResult = currentlyChecking.comparator.compare(key, currentlyChecking.key)
|
||||
when {
|
||||
comparingResult > 0 -> {
|
||||
currentlyChecking = currentlyChecking.rightNode
|
||||
continue
|
||||
}
|
||||
comparingResult < 0 -> {
|
||||
currentlyChecking = currentlyChecking.leftNode
|
||||
continue
|
||||
}
|
||||
else -> return currentlyChecking
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
lockedLockers.forEach {
|
||||
runCatching { it.releaseRead() }
|
||||
}
|
||||
}
|
||||
error("Unable to find node")
|
||||
}
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.contains(data: K): Boolean = findNode(data) != null
|
||||
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.findNodesInRange(from: K, to: K, fromInclusiveMode: Boolean, toInclusiveMode: Boolean): Set<SortedMapLikeBinaryTreeNode<K, V>> {
|
||||
val results = mutableSetOf<SortedMapLikeBinaryTreeNode<K, V>>()
|
||||
val leftToCheck = mutableSetOf(this)
|
||||
val lockedLockers = mutableSetOf<SmartRWLocker>()
|
||||
val fromComparingFun: (SortedMapLikeBinaryTreeNode<K, V>) -> Boolean = if (fromInclusiveMode) {
|
||||
{ it.comparator.compare(from, it.key) <= 0 }
|
||||
} else {
|
||||
{ it.comparator.compare(from, it.key) < 0 }
|
||||
}
|
||||
val toComparingFun: (SortedMapLikeBinaryTreeNode<K, V>) -> Boolean = if (toInclusiveMode) {
|
||||
{ it.comparator.compare(to, it.key) >= 0 }
|
||||
} else {
|
||||
{ it.comparator.compare(to, it.key) > 0 }
|
||||
}
|
||||
try {
|
||||
while (coroutineContext.job.isActive && leftToCheck.isNotEmpty()) {
|
||||
val currentlyChecking = leftToCheck.first()
|
||||
leftToCheck.remove(currentlyChecking)
|
||||
if (currentlyChecking in results) {
|
||||
continue
|
||||
}
|
||||
currentlyChecking.locker.acquireRead()
|
||||
lockedLockers.add(currentlyChecking.locker)
|
||||
if (fromComparingFun(currentlyChecking) && toComparingFun(currentlyChecking)) {
|
||||
results.add(currentlyChecking)
|
||||
currentlyChecking.leftNode ?.let { leftToCheck.add(it) }
|
||||
currentlyChecking.rightNode ?.let { leftToCheck.add(it) }
|
||||
continue
|
||||
}
|
||||
when {
|
||||
currentlyChecking.comparator.compare(to, currentlyChecking.key) < 0 -> currentlyChecking.leftNode ?.let { leftToCheck.add(it) }
|
||||
currentlyChecking.comparator.compare(from, currentlyChecking.key) > 0 -> currentlyChecking.rightNode ?.let { leftToCheck.add(it) }
|
||||
}
|
||||
}
|
||||
return results.toSet()
|
||||
} finally {
|
||||
lockedLockers.forEach {
|
||||
runCatching { it.releaseRead() }
|
||||
}
|
||||
}
|
||||
error("Unable to find nodes range")
|
||||
}
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.deepEquals(other: SortedMapLikeBinaryTreeNode<K, V>): Boolean {
|
||||
val leftToCheck = mutableSetOf(this)
|
||||
val othersToCheck = mutableSetOf(other)
|
||||
val lockedLockers = mutableSetOf<SmartRWLocker>()
|
||||
try {
|
||||
while (leftToCheck.isNotEmpty() && othersToCheck.isNotEmpty()) {
|
||||
val thisToCheck = leftToCheck.first()
|
||||
leftToCheck.remove(thisToCheck)
|
||||
|
||||
val otherToCheck = othersToCheck.first()
|
||||
othersToCheck.remove(otherToCheck)
|
||||
|
||||
if (thisToCheck.locker !in lockedLockers) {
|
||||
thisToCheck.locker.acquireRead()
|
||||
lockedLockers.add(thisToCheck.locker)
|
||||
}
|
||||
if (otherToCheck.locker !in lockedLockers) {
|
||||
otherToCheck.locker.acquireRead()
|
||||
lockedLockers.add(otherToCheck.locker)
|
||||
}
|
||||
|
||||
if (thisToCheck.key != otherToCheck.key || thisToCheck.value != otherToCheck.value) {
|
||||
return false
|
||||
}
|
||||
|
||||
if ((thisToCheck.leftNode == null).xor(otherToCheck.leftNode == null)) {
|
||||
return false
|
||||
}
|
||||
if ((thisToCheck.rightNode == null).xor(otherToCheck.rightNode == null)) {
|
||||
return false
|
||||
}
|
||||
|
||||
thisToCheck.leftNode?.let { leftToCheck.add(it) }
|
||||
thisToCheck.rightNode?.let { leftToCheck.add(it) }
|
||||
|
||||
otherToCheck.leftNode?.let { othersToCheck.add(it) }
|
||||
otherToCheck.rightNode?.let { othersToCheck.add(it) }
|
||||
}
|
||||
} finally {
|
||||
lockedLockers.forEach {
|
||||
runCatching { it.releaseRead() }
|
||||
}
|
||||
}
|
||||
|
||||
return leftToCheck.isEmpty() && othersToCheck.isEmpty()
|
||||
}
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.findNodesInRange(from: K, to: K): Set<SortedMapLikeBinaryTreeNode<K, V>> = findNodesInRange(
|
||||
from = from,
|
||||
to = to,
|
||||
fromInclusiveMode = true,
|
||||
toInclusiveMode = true
|
||||
)
|
||||
suspend fun <K, V> SortedMapLikeBinaryTreeNode<K, V>.findNodesInRangeExcluding(from: K, to: K): Set<SortedMapLikeBinaryTreeNode<K, V>> = findNodesInRange(
|
||||
from = from,
|
||||
to = to,
|
||||
fromInclusiveMode = false,
|
||||
toInclusiveMode = false
|
||||
)
|
||||
suspend fun <K : Comparable<K>, V> SortedMapLikeBinaryTreeNode<K, V>.findNodesInRange(range: ClosedRange<K>): Set<SortedMapLikeBinaryTreeNode<K, V>> = findNodesInRange(
|
||||
from = range.start,
|
||||
to = range.endInclusive,
|
||||
)
|
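A minimal usage sketch of the map-like tree added above; every call is declared in this file, only the `runBlocking` wrapper is an illustrative assumption:

import dev.inmo.micro_utils.coroutines.collections.*
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
    val root = SortedMapLikeBinaryTreeNode(0, "zero")  // comparator derived from the Comparable key
    root.upsertSubNode(1, "one")
    root.upsertSubNode(-1, "minus one")
    root.upsertSubNode(1, "ONE")                       // same key: only the value is replaced

    println(root.findNode(1)?.value)                   // ONE
    println(root.contains(2))                          // false

    // Inclusive range search over keys
    val found = root.findNodesInRange(0..1)
    println(found.map { it.key to it.value })          // [(0, zero), (1, ONE)] in some order

    // Iteration is in ascending key order, without consistency guarantees under concurrent changes
    println(root.map { it.key })                       // [-1, 0, 1]
    Unit
}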
@@ -1,2 +0,0 @@
|
||||
actual val AllowDeepInsertOnWorksTest: Boolean
|
||||
get() = false
|
@@ -1,2 +0,0 @@
|
||||
actual val AllowDeepInsertOnWorksTest: Boolean
|
||||
get() = true
|
@@ -1,25 +1,20 @@
|
||||
package dev.inmo.micro_utils.coroutines
|
||||
|
||||
import kotlinx.coroutines.*
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import kotlin.test.Test
|
||||
|
||||
class HandleSafelyCoroutineContextTest {
|
||||
@Test
|
||||
fun testHandleSafelyCoroutineContext() {
|
||||
val scope = CoroutineScope(Dispatchers.Default)
|
||||
fun testHandleSafelyCoroutineContext() = runTest {
|
||||
val scope = this
|
||||
var contextHandlerHappen = false
|
||||
var localHandlerHappen = false
|
||||
var defaultHandlerHappen = false
|
||||
defaultSafelyExceptionHandler = {
|
||||
defaultHandlerHappen = true
|
||||
throw it
|
||||
}
|
||||
val contextHandler: ExceptionHandler<Unit> = {
|
||||
contextHandlerHappen = true
|
||||
}
|
||||
val checkJob = scope.launch {
|
||||
safelyWithContextExceptionHandler(contextHandler) {
|
||||
safely(
|
||||
runCatchingLogging ({
|
||||
contextHandlerHappen = true
|
||||
}) {
|
||||
runCatchingLogging (
|
||||
{
|
||||
localHandlerHappen = true
|
||||
}
|
||||
@@ -29,10 +24,8 @@ class HandleSafelyCoroutineContextTest {
|
||||
println(coroutineContext)
|
||||
error("That must happen too:)")
|
||||
}
|
||||
}
|
||||
launchSynchronously { checkJob.join() }
|
||||
}.join()
|
||||
assert(contextHandlerHappen)
|
||||
assert(localHandlerHappen)
|
||||
assert(defaultHandlerHappen)
|
||||
}
|
||||
}
|
@@ -1,3 +1,5 @@
|
||||
package dev.inmo.micro_utils.coroutines
|
||||
|
||||
import dev.inmo.micro_utils.coroutines.collections.SortedBinaryTreeNode
|
||||
import dev.inmo.micro_utils.coroutines.collections.addSubNode
|
||||
import dev.inmo.micro_utils.coroutines.collections.findNode
|
||||
@@ -10,8 +12,6 @@ import kotlin.test.assertEquals
|
||||
import kotlin.test.assertTrue
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
expect val AllowDeepInsertOnWorksTest: Boolean
|
||||
|
||||
class SortedBinaryTreeNodeTests {
|
||||
@Test
|
||||
fun insertOnZeroLevelWorks() = runTest {
|
||||
@@ -46,7 +46,6 @@ class SortedBinaryTreeNodeTests {
|
||||
}
|
||||
@Test
|
||||
fun deepReInsertOnWorks() = runTest(timeout = 300.seconds) {
|
||||
if (AllowDeepInsertOnWorksTest == false) return@runTest
|
||||
val zeroNode = SortedBinaryTreeNode(0)
|
||||
val rangeRadius = 500
|
||||
val nodes = mutableMapOf<Int, SortedBinaryTreeNode<Int>>()
|
||||
@@ -124,7 +123,6 @@ class SortedBinaryTreeNodeTests {
|
||||
}
|
||||
@Test
|
||||
fun deepInsertOnWorks() = runTest(timeout = 240.seconds) {
|
||||
if (AllowDeepInsertOnWorksTest == false) return@runTest
|
||||
val zeroNode = SortedBinaryTreeNode(0)
|
||||
val rangeRadius = 500
|
||||
val nodes = mutableMapOf<Int, SortedBinaryTreeNode<Int>>()
|
@@ -0,0 +1,118 @@
|
||||
package dev.inmo.micro_utils.coroutines
|
||||
|
||||
import dev.inmo.micro_utils.coroutines.collections.*
|
||||
import kotlinx.coroutines.test.runTest
|
||||
import kotlin.test.Test
|
||||
import kotlin.test.assertEquals
|
||||
import kotlin.test.assertTrue
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
class SortedMapLikeBinaryTreeNodeTests {
|
||||
@Test
|
||||
fun insertOnZeroLevelWorks() = runTest {
|
||||
val zeroNode = SortedMapLikeBinaryTreeNode(0, 0)
|
||||
zeroNode.upsertSubNode(1, 1)
|
||||
zeroNode.upsertSubNode(-1, -1)
|
||||
|
||||
assertEquals(0, zeroNode.key)
|
||||
assertEquals(1, zeroNode.getRightNode() ?.key)
|
||||
assertEquals(-1, zeroNode.getLeftNode() ?.key)
|
||||
|
||||
assertEquals(0, zeroNode.findNode(0) ?.value)
|
||||
assertEquals(1, zeroNode.findNode(1) ?.value)
|
||||
assertEquals(-1, zeroNode.findNode(-1) ?.value)
|
||||
}
|
||||
@Test
|
||||
fun searchOnZeroLevelWorks() = runTest {
|
||||
val zeroNode = SortedMapLikeBinaryTreeNode(0, 0)
|
||||
val oneNode = zeroNode.upsertSubNode(1, 1)
|
||||
val minusOneNode = zeroNode.upsertSubNode(-1, -1)
|
||||
|
||||
val assertingNodesToSearchQuery = mapOf(
|
||||
setOf(oneNode) to (1 .. 1),
|
||||
setOf(zeroNode, oneNode) to (0 .. 1),
|
||||
setOf(minusOneNode, zeroNode, oneNode) to (-1 .. 1),
|
||||
setOf(minusOneNode, zeroNode) to (-1 .. 0),
|
||||
setOf(minusOneNode) to (-1 .. -1),
|
||||
setOf(zeroNode) to (0 .. 0),
|
||||
)
|
||||
|
||||
assertingNodesToSearchQuery.forEach {
|
||||
val foundData = zeroNode.findNodesInRange(it.value)
|
||||
assertTrue(foundData.containsAll(it.key))
|
||||
assertTrue(it.key.containsAll(foundData))
|
||||
}
|
||||
}
|
||||
@Test
|
||||
fun deepReInsertOnWorks() = runTest(timeout = 300.seconds) {
|
||||
var zeroNode = SortedMapLikeBinaryTreeNode(0, 0)
|
||||
val rangeRadius = 500
|
||||
val nodes = mutableMapOf<Int, SortedMapLikeBinaryTreeNode<Int, Int>>()
|
||||
for (i in -rangeRadius .. rangeRadius) {
|
||||
nodes[i] = zeroNode.upsertSubNode(i, i)
|
||||
if (i == zeroNode.key) {
|
||||
zeroNode = nodes.getValue(i)
|
||||
}
|
||||
}
|
||||
|
||||
for (i in -rangeRadius .. rangeRadius) {
|
||||
val expectedNode = nodes.getValue(i)
|
||||
val foundNode = zeroNode.findNode(i)
|
||||
|
||||
assertEquals(expectedNode, foundNode)
|
||||
|
||||
if (expectedNode === zeroNode) continue
|
||||
|
||||
val parentNode = zeroNode.findParentNode(i)
|
||||
assertTrue(
|
||||
parentNode ?.getLeftNode() === expectedNode || parentNode ?.getRightNode() === expectedNode,
|
||||
"It is expected, that parent node with data ${parentNode ?.key} will be parent of ${expectedNode.key}, but its left subnode is ${parentNode ?.getLeftNode() ?.key} and right one is ${parentNode ?.getRightNode() ?.key}"
|
||||
)
|
||||
assertTrue(
|
||||
foundNode != null && expectedNode.deepEquals(foundNode)
|
||||
)
|
||||
|
||||
zeroNode.upsertSubNode(i, -i)
|
||||
val foundModifiedNode = zeroNode.findNode(i)
|
||||
assertEquals(foundNode ?.value, foundModifiedNode ?.value ?.times(-1))
|
||||
}
|
||||
}
|
||||
@Test
|
||||
fun deepInsertOnWorks() = runTest(timeout = 240.seconds) {
|
||||
val zeroNode = SortedMapLikeBinaryTreeNode(0, 0)
|
||||
val rangeRadius = 500
|
||||
val nodes = mutableMapOf<Int, SortedMapLikeBinaryTreeNode<Int, Int>>()
|
||||
for (i in -rangeRadius .. rangeRadius) {
|
||||
if (zeroNode.key != i) {
|
||||
nodes[i] = zeroNode.upsertSubNode(i, i)
|
||||
}
|
||||
}
|
||||
nodes[zeroNode.key] = zeroNode
|
||||
|
||||
for (i in -rangeRadius .. rangeRadius) {
|
||||
val expectedNode = nodes.getValue(i)
|
||||
val foundNode = zeroNode.findNode(i)
|
||||
|
||||
assertTrue(expectedNode === foundNode)
|
||||
|
||||
if (expectedNode === zeroNode) continue
|
||||
|
||||
val parentNode = zeroNode.findParentNode(i)
|
||||
assertTrue(
|
||||
parentNode ?.getLeftNode() === expectedNode || parentNode ?.getRightNode() === expectedNode,
|
||||
"It is expected, that parent node with data ${parentNode ?.key} will be parent of ${expectedNode.key}, but its left subnode is ${parentNode ?.getLeftNode() ?.key} and right one is ${parentNode ?.getRightNode() ?.key}"
|
||||
)
|
||||
}
|
||||
|
||||
val sourceTreeSize = zeroNode.size()
|
||||
|
||||
var previousData = -rangeRadius - 1
|
||||
for (node in zeroNode) {
|
||||
assertTrue(nodes[node.key] === node)
|
||||
assertTrue(previousData == node.key - 1)
|
||||
previousData = node.key
|
||||
}
|
||||
|
||||
assertTrue(sourceTreeSize == zeroNode.size())
|
||||
}
|
||||
}
|
@@ -1,2 +0,0 @@
|
||||
actual val AllowDeepInsertOnWorksTest: Boolean
|
||||
get() = true
|
@@ -1,2 +0,0 @@
|
||||
actual val AllowDeepInsertOnWorksTest: Boolean
|
||||
get() = true
|
@@ -1,2 +0,0 @@
|
||||
actual val AllowDeepInsertOnWorksTest: Boolean
|
||||
get() = true
|
@@ -15,5 +15,5 @@ crypto_js_version=4.1.1
|
||||
# Project data
|
||||
|
||||
group=dev.inmo
|
||||
version=0.24.8
|
||||
android_code_version=288
|
||||
version=0.25.1
|
||||
android_code_version=291
|
||||
|
@@ -32,7 +32,7 @@ data class PaginationResult<T>(
|
||||
page: Int,
|
||||
results: List<T>,
|
||||
pagesNumber: Int,
|
||||
size: Int
|
||||
size: Int = results.size
|
||||
) : this(
|
||||
page,
|
||||
size,
|
||||
|
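With this change the alternative constructor can infer `size` from `results`; a minimal sketch (parameter names are taken from the hunk above, the import path is assumed from the surrounding files):

import dev.inmo.micro_utils.pagination.PaginationResult

// size is omitted and defaults to results.size (2 here)
val firstPage = PaginationResult(
    page = 0,
    results = listOf("a", "b"),
    pagesNumber = 3
)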
@@ -26,6 +26,16 @@ inline fun Pagination.nextPage() =
|
||||
size
|
||||
)
|
||||
|
||||
/**
|
||||
* This method does NOT check the [Pagination.page] of the receiver. Returns pagination for the previous page
|
||||
*/
|
||||
@Suppress("NOTHING_TO_INLINE")
|
||||
inline fun Pagination.previousPage() =
|
||||
SimplePagination(
|
||||
page - 1,
|
||||
size
|
||||
)
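A small sketch of the new extension alongside the existing `nextPage` (both simply shift the page number; neither validates bounds):

import dev.inmo.micro_utils.pagination.*

val current = SimplePagination(2, 20)     // page = 2, size = 20
val next = current.nextPage()             // page 3, same size
val previous = current.previousPage()     // page 1, same size; combine with isFirstPage if bounds matter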
|
||||
|
||||
/**
|
||||
* @param page Current page number
|
||||
* @param size Current page size
|
||||
|
@@ -1,7 +1,14 @@
|
||||
package dev.inmo.micro_utils.pagination.compose
|
||||
|
||||
import androidx.compose.runtime.*
|
||||
import dev.inmo.micro_utils.coroutines.SpecialMutableStateFlow
|
||||
import dev.inmo.micro_utils.coroutines.launchLoggingDropExceptions
|
||||
import dev.inmo.micro_utils.coroutines.runCatchingLogging
|
||||
import dev.inmo.micro_utils.pagination.*
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.sync.Mutex
|
||||
import kotlinx.coroutines.sync.withLock
|
||||
|
||||
/**
|
||||
* Context for managing infinite pagination in a Compose UI.
|
||||
@@ -15,35 +22,46 @@ import dev.inmo.micro_utils.pagination.*
|
||||
*/
|
||||
class InfinityPagedComponentContext<T> internal constructor(
|
||||
page: Int,
|
||||
size: Int
|
||||
size: Int,
|
||||
private val scope: CoroutineScope,
|
||||
private val loader: suspend InfinityPagedComponentContext<T>.(Pagination) -> PaginationResult<T>
|
||||
) {
|
||||
internal val startPage = SimplePagination(page, size)
|
||||
internal val iterationState: MutableState<Pair<Int, Pagination?>> = mutableStateOf(0 to null)
|
||||
internal val dataState: MutableState<List<T>?> = mutableStateOf(null)
|
||||
internal var lastPageLoaded = false
|
||||
internal val latestLoadedPage = SpecialMutableStateFlow<PaginationResult<T>?>(null)
|
||||
internal val dataState = SpecialMutableStateFlow<List<T>?>(null)
|
||||
internal var loadingJob: Job? = null
|
||||
internal val loadingMutex = Mutex()
|
||||
|
||||
/**
|
||||
* Loads the next page of data. If the current page is the last one, the function returns early.
|
||||
*/
|
||||
fun loadNext() {
|
||||
if (lastPageLoaded) return
|
||||
if (iterationState.value.second is SimplePagination) return // Data loading has been inited but not loaded yet
|
||||
|
||||
iterationState.value = iterationState.value.let {
|
||||
if ((it.second as? PaginationResult<*>) ?.isLastPage == true) return
|
||||
(it.first + 1) to (it.second ?: startPage).nextPage()
|
||||
fun loadNext(): Job {
|
||||
return scope.launchLoggingDropExceptions {
|
||||
loadingMutex.withLock {
|
||||
if (latestLoadedPage.value ?.isLastPage == true) return@launchLoggingDropExceptions
|
||||
loadingJob = loadingJob ?: scope.launchLoggingDropExceptions {
|
||||
runCatching {
|
||||
loader(latestLoadedPage.value ?.nextPage() ?: startPage)
|
||||
}.onSuccess {
|
||||
latestLoadedPage.value = it
|
||||
dataState.value = (dataState.value ?: emptyList()) + it.results
|
||||
}
|
||||
loadingMutex.withLock {
|
||||
loadingJob = null
|
||||
}
|
||||
}
|
||||
loadingJob
|
||||
} ?.join()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reloads the pagination from the first page, clearing previously loaded data.
|
||||
*/
|
||||
fun reload() {
|
||||
fun reload(): Job {
|
||||
latestLoadedPage.value = null
|
||||
dataState.value = null
|
||||
lastPageLoaded = false
|
||||
iterationState.value = iterationState.value.let {
|
||||
(it.first + 1) to null
|
||||
}
|
||||
return loadNext()
|
||||
}
|
||||
}
|
||||
|
||||
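After this rework `loadNext()` and `reload()` return the loading `Job`, and accumulated pages are exposed through a `SpecialMutableStateFlow` collected as Compose state. A hedged usage sketch of the public `size`-based overload declared further below; `fetchItems` and the Material widgets are illustrative assumptions:

import androidx.compose.foundation.layout.Column
import androidx.compose.material.Button
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
import dev.inmo.micro_utils.pagination.*
import dev.inmo.micro_utils.pagination.compose.InfinityPagedComponent

@Composable
fun ItemsList(fetchItems: suspend (Pagination) -> PaginationResult<String>) {
    InfinityPagedComponent(
        size = 20,                                  // first page is loaded automatically via reload()
        loader = { pagination -> fetchItems(pagination) }
    ) { items ->
        if (items == null) {
            Text("Loading...")                      // nothing loaded yet
        } else {
            Column {
                items.forEach { Text(it) }
                Button(onClick = { loadNext() }) {  // returns the Job of the shared loading routine
                    Text("Load more")
                }
            }
        }
    }
}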
@@ -62,20 +80,17 @@ internal fun <T> InfinityPagedComponent(
|
||||
page: Int,
|
||||
size: Int,
|
||||
loader: suspend InfinityPagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
predefinedScope: CoroutineScope? = null,
|
||||
block: @Composable InfinityPagedComponentContext<T>.(List<T>?) -> Unit
|
||||
) {
|
||||
val context = remember { InfinityPagedComponentContext<T>(page, size) }
|
||||
|
||||
LaunchedEffect(context.iterationState.value.first) {
|
||||
val paginationResult = loader(context, context.iterationState.value.second ?: context.startPage)
|
||||
if (paginationResult.isLastPage) {
|
||||
context.lastPageLoaded = true
|
||||
}
|
||||
context.iterationState.value = context.iterationState.value.copy(second = paginationResult)
|
||||
context.dataState.value = (context.dataState.value ?: emptyList()) + paginationResult.results
|
||||
val scope = predefinedScope ?: rememberCoroutineScope()
|
||||
val context = remember { InfinityPagedComponentContext<T>(page, size, scope, loader) }
|
||||
remember {
|
||||
context.reload()
|
||||
}
|
||||
|
||||
context.block(context.dataState.value)
|
||||
val dataState = context.dataState.collectAsState()
|
||||
context.block(dataState.value)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -91,12 +106,14 @@ internal fun <T> InfinityPagedComponent(
|
||||
fun <T> InfinityPagedComponent(
|
||||
pageInfo: Pagination,
|
||||
loader: suspend InfinityPagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
predefinedScope: CoroutineScope? = null,
|
||||
block: @Composable InfinityPagedComponentContext<T>.(List<T>?) -> Unit
|
||||
) {
|
||||
InfinityPagedComponent(
|
||||
pageInfo.page,
|
||||
pageInfo.size,
|
||||
loader,
|
||||
predefinedScope,
|
||||
block
|
||||
)
|
||||
}
|
||||
@@ -114,7 +131,8 @@ fun <T> InfinityPagedComponent(
|
||||
fun <T> InfinityPagedComponent(
|
||||
size: Int,
|
||||
loader: suspend InfinityPagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
predefinedScope: CoroutineScope? = null,
|
||||
block: @Composable InfinityPagedComponentContext<T>.(List<T>?) -> Unit
|
||||
) {
|
||||
InfinityPagedComponent(0, size, loader, block)
|
||||
InfinityPagedComponent(0, size, loader, predefinedScope, block)
|
||||
}
|
||||
|
@@ -1,10 +1,13 @@
|
||||
package dev.inmo.micro_utils.pagination.compose
|
||||
|
||||
import androidx.compose.runtime.*
|
||||
import dev.inmo.micro_utils.common.Optional
|
||||
import dev.inmo.micro_utils.common.dataOrThrow
|
||||
import dev.inmo.micro_utils.common.optional
|
||||
import dev.inmo.micro_utils.coroutines.SpecialMutableStateFlow
|
||||
import dev.inmo.micro_utils.coroutines.launchLoggingDropExceptions
|
||||
import dev.inmo.micro_utils.pagination.*
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.sync.Mutex
|
||||
import kotlinx.coroutines.sync.withLock
|
||||
|
||||
/**
|
||||
* Context for managing paginated data in a Compose UI.
|
||||
@@ -19,45 +22,73 @@ import dev.inmo.micro_utils.pagination.*
|
||||
* @param size Number of items per page.
|
||||
*/
|
||||
class PagedComponentContext<T> internal constructor(
|
||||
preset: PaginationResult<T>? = null,
|
||||
initialPage: Int,
|
||||
size: Int
|
||||
size: Int,
|
||||
private val scope: CoroutineScope,
|
||||
private val loader: suspend PagedComponentContext<T>.(Pagination) -> PaginationResult<T>
|
||||
) {
|
||||
internal val iterationState: MutableState<Pair<Int, Pagination>> = mutableStateOf(0 to SimplePagination(preset?.page ?: initialPage, preset?.size ?: size))
|
||||
|
||||
internal var dataOptional: PaginationResult<T>? = preset
|
||||
private set
|
||||
internal val dataState: MutableState<PaginationResult<T>?> = mutableStateOf(dataOptional)
|
||||
internal val startPage = SimplePagination(initialPage, size)
|
||||
internal val latestLoadedPage = SpecialMutableStateFlow<PaginationResult<T>?>(null)
|
||||
internal val dataState = SpecialMutableStateFlow<PaginationResult<T>?>(null)
|
||||
internal var loadingJob: Job? = null
|
||||
internal val loadingMutex = Mutex()
|
||||
|
||||
private fun initLoadingJob(
|
||||
skipCheckerInLock: () -> Boolean,
|
||||
pageGetter: () -> Pagination
|
||||
): Job {
|
||||
return scope.launchLoggingDropExceptions {
|
||||
loadingMutex.withLock {
|
||||
if (skipCheckerInLock()) return@launchLoggingDropExceptions
|
||||
loadingJob = loadingJob ?: scope.launchLoggingDropExceptions {
|
||||
runCatching {
|
||||
loader(pageGetter())
|
||||
}.onSuccess {
|
||||
latestLoadedPage.value = it
|
||||
dataState.value = it
|
||||
}
|
||||
loadingMutex.withLock {
|
||||
loadingJob = null
|
||||
}
|
||||
}
|
||||
loadingJob
|
||||
} ?.join()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the next page of data. If the last page is reached, this function returns early.
|
||||
*/
|
||||
fun loadNext() {
|
||||
iterationState.value = iterationState.value.let {
|
||||
if (dataState.value ?.isLastPage == true) return
|
||||
(it.first + 1) to it.second.nextPage()
|
||||
fun loadNext(): Job {
|
||||
return initLoadingJob(
|
||||
{ latestLoadedPage.value ?.isLastPage == true }
|
||||
) {
|
||||
latestLoadedPage.value ?.nextPage() ?: startPage
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the previous page of data if available.
|
||||
*/
|
||||
fun loadPrevious() {
|
||||
iterationState.value = iterationState.value.let {
|
||||
if (it.second.isFirstPage) return
|
||||
(it.first - 1) to SimplePagination(
|
||||
it.second.page - 1,
|
||||
it.second.size
|
||||
)
|
||||
fun loadPrevious(): Job {
|
||||
return initLoadingJob(
|
||||
{ latestLoadedPage.value ?.isFirstPage == true }
|
||||
) {
|
||||
latestLoadedPage.value ?.previousPage() ?: startPage
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reloads the current page, refreshing the data.
|
||||
*/
|
||||
fun reload() {
|
||||
iterationState.value = iterationState.value.let {
|
||||
it.copy(it.first + 1)
|
||||
fun reload(): Job {
|
||||
return initLoadingJob(
|
||||
{
|
||||
latestLoadedPage.value = null
|
||||
true
|
||||
}
|
||||
) {
|
||||
startPage
|
||||
}
|
||||
}
|
||||
}
|
||||
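The same pattern applies to `PagedComponentContext`: `loadNext()`, `loadPrevious()` and `reload()` all return the `Job` started by the shared `initLoadingJob` routine. A hedged sketch of the `pageInfo` overload (again with illustrative Material widgets and a hypothetical `fetchPage` loader):

import androidx.compose.foundation.layout.Column
import androidx.compose.material.Button
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
import dev.inmo.micro_utils.pagination.*
import dev.inmo.micro_utils.pagination.compose.PagedComponent

@Composable
fun PagedList(fetchPage: suspend (Pagination) -> PaginationResult<String>) {
    PagedComponent(
        pageInfo = SimplePagination(0, 10),         // start at page 0 with 10 items per page
        loader = { pagination -> fetchPage(pagination) }
    ) { result ->
        Column {
            result.results.forEach { Text(it) }
            Button(onClick = { loadPrevious() }) { Text("Previous") }
            Button(onClick = { loadNext() }) { Text("Next") }
        }
    }
}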
@@ -74,46 +105,24 @@ class PagedComponentContext<T> internal constructor(
|
||||
*/
|
||||
@Composable
|
||||
internal fun <T> PagedComponent(
|
||||
preload: PaginationResult<T>?,
|
||||
initialPage: Int,
|
||||
size: Int,
|
||||
loader: suspend PagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
predefinedScope: CoroutineScope? = null,
|
||||
block: @Composable PagedComponentContext<T>.(PaginationResult<T>) -> Unit
|
||||
) {
|
||||
val context = remember { PagedComponentContext(preload, initialPage, size) }
|
||||
|
||||
LaunchedEffect(context.iterationState.value) {
|
||||
context.dataState.value = loader(context, context.iterationState.value.second)
|
||||
val scope = predefinedScope ?: rememberCoroutineScope()
|
||||
val context = remember { PagedComponentContext<T>(initialPage, size, scope, loader) }
|
||||
remember {
|
||||
context.reload()
|
||||
}
|
||||
|
||||
context.dataState.value ?.let {
|
||||
val pageState = context.dataState.collectAsState()
|
||||
pageState.value ?.let {
|
||||
context.block(it)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Overloaded composable function for paginated components with preloaded data.
|
||||
*
|
||||
* @param T The type of paginated data.
|
||||
* @param preload Preloaded pagination result.
|
||||
* @param loader Suspended function that loads paginated data.
|
||||
* @param block Composable function that renders the UI with the loaded data.
|
||||
*/
|
||||
@Composable
|
||||
fun <T> PagedComponent(
|
||||
preload: PaginationResult<T>,
|
||||
loader: suspend PagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
block: @Composable PagedComponentContext<T>.(PaginationResult<T>) -> Unit
|
||||
) {
|
||||
PagedComponent(
|
||||
preload,
|
||||
preload.page,
|
||||
preload.size,
|
||||
loader,
|
||||
block
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Overloaded composable function for paginated components with pagination info.
|
||||
*
|
||||
@@ -126,36 +135,18 @@ fun <T> PagedComponent(
|
||||
fun <T> PagedComponent(
|
||||
pageInfo: Pagination,
|
||||
loader: suspend PagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
predefinedScope: CoroutineScope? = null,
|
||||
block: @Composable PagedComponentContext<T>.(PaginationResult<T>) -> Unit
|
||||
) {
|
||||
PagedComponent(
|
||||
null,
|
||||
pageInfo.page,
|
||||
pageInfo.size,
|
||||
loader,
|
||||
predefinedScope,
|
||||
block
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Overloaded composable function for paginated components with an initial page.
|
||||
*
|
||||
* @param T The type of paginated data.
|
||||
* @param initialPage Initial page number.
|
||||
* @param size Number of items per page.
|
||||
* @param loader Suspended function that loads paginated data.
|
||||
* @param block Composable function that renders the UI with the loaded data.
|
||||
*/
|
||||
@Composable
|
||||
fun <T> PagedComponent(
|
||||
initialPage: Int,
|
||||
size: Int,
|
||||
loader: suspend PagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
block: @Composable PagedComponentContext<T>.(PaginationResult<T>) -> Unit
|
||||
) {
|
||||
PagedComponent(null, initialPage, size, loader, block)
|
||||
}
|
||||
|
||||
/**
|
||||
* Overloaded composable function for paginated components with only a size parameter.
|
||||
*
|
||||
@@ -168,7 +159,8 @@ fun <T> PagedComponent(
|
||||
fun <T> PagedComponent(
|
||||
size: Int,
|
||||
loader: suspend PagedComponentContext<T>.(Pagination) -> PaginationResult<T>,
|
||||
predefinedScope: CoroutineScope? = null,
|
||||
block: @Composable PagedComponentContext<T>.(PaginationResult<T>) -> Unit
|
||||
) {
|
||||
PagedComponent(0, size, loader, block)
|
||||
PagedComponent(0, size, loader, predefinedScope, block)
|
||||
}
|
||||
|
@@ -30,15 +30,13 @@ class InfinityPagedComponentTests {
|
||||
}
|
||||
) {
|
||||
if (it == null) {
|
||||
if (this.iterationState.value.second != null) {
|
||||
assertEquals(0, (this.iterationState.value.second as? SimplePagination) ?.page)
|
||||
}
|
||||
assertEquals(null, it)
|
||||
} else {
|
||||
assertEquals(expectedList, it)
|
||||
}
|
||||
|
||||
LaunchedEffect(it ?.size) {
|
||||
loadNext()
|
||||
loadNext().join()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -12,10 +12,10 @@ import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.flow.*
|
||||
|
||||
open class ReadCRUDCacheRepo<ObjectType, IdType>(
|
||||
protected open val parentRepo: ReadCRUDRepo<ObjectType, IdType>,
|
||||
protected open val kvCache: KVCache<IdType, ObjectType>,
|
||||
protected val parentRepo: ReadCRUDRepo<ObjectType, IdType>,
|
||||
protected val kvCache: KVCache<IdType, ObjectType>,
|
||||
protected val locker: SmartRWLocker = SmartRWLocker(),
|
||||
protected open val idGetter: (ObjectType) -> IdType
|
||||
protected val idGetter: (ObjectType) -> IdType
|
||||
) : ReadCRUDRepo<ObjectType, IdType> by parentRepo, CommonCacheRepo {
|
||||
override suspend fun getById(id: IdType): ObjectType? = locker.withReadAcquire {
|
||||
kvCache.get(id)
|
||||
@@ -51,11 +51,11 @@ fun <ObjectType, IdType> ReadCRUDRepo<ObjectType, IdType>.cached(
|
||||
) = ReadCRUDCacheRepo(this, kvCache, locker, idGetter)
|
||||
|
||||
open class WriteCRUDCacheRepo<ObjectType, IdType, InputValueType>(
|
||||
protected open val parentRepo: WriteCRUDRepo<ObjectType, IdType, InputValueType>,
|
||||
protected open val kvCache: KeyValueRepo<IdType, ObjectType>,
|
||||
protected open val scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
|
||||
protected val parentRepo: WriteCRUDRepo<ObjectType, IdType, InputValueType>,
|
||||
protected val kvCache: KeyValueRepo<IdType, ObjectType>,
|
||||
protected val scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
|
||||
protected val locker: SmartRWLocker = SmartRWLocker(),
|
||||
protected open val idGetter: (ObjectType) -> IdType
|
||||
protected val idGetter: (ObjectType) -> IdType
|
||||
) : WriteCRUDRepo<ObjectType, IdType, InputValueType>, CommonCacheRepo {
|
||||
override val newObjectsFlow: Flow<ObjectType> by parentRepo::newObjectsFlow
|
||||
override val updatedObjectsFlow: Flow<ObjectType> by parentRepo::updatedObjectsFlow
|
||||
@@ -131,25 +131,25 @@ fun <ObjectType, IdType, InputType> WriteCRUDRepo<ObjectType, IdType, InputType>
|
||||
|
||||
|
||||
open class CRUDCacheRepo<ObjectType, IdType, InputValueType>(
|
||||
override val parentRepo: CRUDRepo<ObjectType, IdType, InputValueType>,
|
||||
protected val crudRepo: CRUDRepo<ObjectType, IdType, InputValueType>,
|
||||
kvCache: KVCache<IdType, ObjectType>,
|
||||
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
|
||||
locker: SmartRWLocker = SmartRWLocker(),
|
||||
idGetter: (ObjectType) -> IdType
|
||||
) : ReadCRUDCacheRepo<ObjectType, IdType>(
|
||||
parentRepo,
|
||||
crudRepo,
|
||||
kvCache,
|
||||
locker,
|
||||
idGetter
|
||||
),
|
||||
WriteCRUDRepo<ObjectType, IdType, InputValueType> by WriteCRUDCacheRepo(
|
||||
parentRepo,
|
||||
WriteCRUDRepo<ObjectType, IdType, InputValueType> by WriteCRUDCacheRepo(
|
||||
crudRepo,
|
||||
kvCache,
|
||||
scope,
|
||||
locker,
|
||||
idGetter
|
||||
),
|
||||
CRUDRepo<ObjectType, IdType, InputValueType> {
|
||||
CRUDRepo<ObjectType, IdType, InputValueType> {
|
||||
override suspend fun invalidate() = kvCache.actualizeAll(parentRepo, locker = locker)
|
||||
}
|
||||
|
||||
|
@@ -12,8 +12,8 @@ import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.flow.*
|
||||
|
||||
open class ReadKeyValueCacheRepo<Key,Value>(
|
||||
protected open val parentRepo: ReadKeyValueRepo<Key, Value>,
|
||||
protected open val kvCache: KVCache<Key, Value>,
|
||||
protected val parentRepo: ReadKeyValueRepo<Key, Value>,
|
||||
protected val kvCache: KVCache<Key, Value>,
|
||||
protected val locker: SmartRWLocker = SmartRWLocker(),
|
||||
) : ReadKeyValueRepo<Key,Value> by parentRepo, CommonCacheRepo {
|
||||
override suspend fun get(k: Key): Value? = locker.withReadAcquire {
|
||||
@@ -58,24 +58,24 @@ fun <Key, Value> ReadKeyValueRepo<Key, Value>.cached(
|
||||
) = ReadKeyValueCacheRepo(this, kvCache, locker)
|
||||
|
||||
open class KeyValueCacheRepo<Key,Value>(
|
||||
override val parentRepo: KeyValueRepo<Key, Value>,
|
||||
protected val kvRepo: KeyValueRepo<Key, Value>,
|
||||
kvCache: KVCache<Key, Value>,
|
||||
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
|
||||
locker: SmartRWLocker = SmartRWLocker(),
|
||||
) : ReadKeyValueCacheRepo<Key,Value>(parentRepo, kvCache, locker), KeyValueRepo<Key,Value>, WriteKeyValueRepo<Key, Value> by parentRepo, CommonCacheRepo {
|
||||
protected val onNewJob = parentRepo.onNewValue.onEach {
|
||||
) : ReadKeyValueCacheRepo<Key,Value>(kvRepo, kvCache, locker), KeyValueRepo<Key,Value>, WriteKeyValueRepo<Key, Value> by kvRepo, CommonCacheRepo {
|
||||
protected val onNewJob = kvRepo.onNewValue.onEach {
|
||||
locker.withWriteLock {
|
||||
kvCache.set(it.first, it.second)
|
||||
}
|
||||
}.launchIn(scope)
|
||||
protected val onRemoveJob = parentRepo.onValueRemoved.onEach {
|
||||
protected val onRemoveJob = kvRepo.onValueRemoved.onEach {
|
||||
locker.withWriteLock {
|
||||
kvCache.unset(it)
|
||||
}
|
||||
}.launchIn(scope)
|
||||
|
||||
override suspend fun clear() {
|
||||
parentRepo.clear()
|
||||
kvRepo.clear()
|
||||
locker.withWriteLock {
|
||||
kvCache.clear()
|
||||
}
|
||||
|
@@ -13,8 +13,8 @@ import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.flow.*
|
||||
|
||||
open class ReadKeyValuesCacheRepo<Key,Value>(
|
||||
protected open val parentRepo: ReadKeyValuesRepo<Key, Value>,
|
||||
protected open val kvCache: KVCache<Key, List<Value>>,
|
||||
protected val parentRepo: ReadKeyValuesRepo<Key, Value>,
|
||||
protected val kvCache: KVCache<Key, List<Value>>,
|
||||
protected val locker: SmartRWLocker = SmartRWLocker(),
|
||||
) : ReadKeyValuesRepo<Key,Value> by parentRepo, CommonCacheRepo {
|
||||
override suspend fun get(k: Key, pagination: Pagination, reversed: Boolean): PaginationResult<Value> {
|
||||
|
@@ -18,7 +18,7 @@ import kotlinx.coroutines.launch
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
open class AutoRecacheReadCRUDRepo<RegisteredObject, Id>(
|
||||
protected open val originalRepo: ReadCRUDRepo<RegisteredObject, Id>,
|
||||
protected val originalRepo: ReadCRUDRepo<RegisteredObject, Id>,
|
||||
protected val scope: CoroutineScope,
|
||||
protected val kvCache: KeyValueRepo<Id, RegisteredObject> = MapKeyValueRepo(),
|
||||
protected val recacheDelay: Long = 60.seconds.inWholeMilliseconds,
|
||||
|
@@ -8,21 +8,21 @@ import kotlinx.coroutines.CoroutineScope
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
open class AutoRecacheKeyValueRepo<Id, RegisteredObject>(
|
||||
override val originalRepo: KeyValueRepo<Id, RegisteredObject>,
|
||||
protected val kvRepo: KeyValueRepo<Id, RegisteredObject>,
|
||||
scope: CoroutineScope,
|
||||
kvCache: KeyValueRepo<Id, RegisteredObject> = MapKeyValueRepo(),
|
||||
recacheDelay: Long = 60.seconds.inWholeMilliseconds,
|
||||
actionWrapper: ActionWrapper = ActionWrapper.Direct,
|
||||
idGetter: (RegisteredObject) -> Id
|
||||
) : AutoRecacheReadKeyValueRepo<Id, RegisteredObject> (
|
||||
originalRepo,
|
||||
kvRepo,
|
||||
scope,
|
||||
kvCache,
|
||||
recacheDelay,
|
||||
actionWrapper,
|
||||
idGetter
|
||||
),
|
||||
WriteKeyValueRepo<Id, RegisteredObject> by AutoRecacheWriteKeyValueRepo(originalRepo, scope, kvCache),
|
||||
WriteKeyValueRepo<Id, RegisteredObject> by AutoRecacheWriteKeyValueRepo(kvRepo, scope, kvCache),
|
||||
KeyValueRepo<Id, RegisteredObject> {
|
||||
|
||||
constructor(
|
||||
@@ -34,14 +34,14 @@ open class AutoRecacheKeyValueRepo<Id, RegisteredObject>(
|
||||
idGetter: (RegisteredObject) -> Id
|
||||
) : this(originalRepo, scope, kvCache, recacheDelay, ActionWrapper.Timeouted(originalCallTimeoutMillis), idGetter)
|
||||
|
||||
override suspend fun unsetWithValues(toUnset: List<RegisteredObject>) = originalRepo.unsetWithValues(
|
||||
override suspend fun unsetWithValues(toUnset: List<RegisteredObject>) = kvRepo.unsetWithValues(
|
||||
toUnset
|
||||
).also {
|
||||
kvCache.unsetWithValues(toUnset)
|
||||
}
|
||||
|
||||
override suspend fun clear() {
|
||||
originalRepo.clear()
|
||||
kvRepo.clear()
|
||||
kvCache.clear()
|
||||
}
|
||||
}
|
||||
|
@@ -18,7 +18,7 @@ import kotlinx.coroutines.launch
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
open class AutoRecacheReadKeyValueRepo<Id, RegisteredObject>(
|
||||
protected open val originalRepo: ReadKeyValueRepo<Id, RegisteredObject>,
|
||||
protected val originalRepo: ReadKeyValueRepo<Id, RegisteredObject>,
|
||||
protected val scope: CoroutineScope,
|
||||
protected val kvCache: KeyValueRepo<Id, RegisteredObject> = MapKeyValueRepo(),
|
||||
protected val recacheDelay: Long = 60.seconds.inWholeMilliseconds,
|
||||
|
@@ -9,19 +9,19 @@ import kotlinx.coroutines.CoroutineScope
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
open class AutoRecacheKeyValuesRepo<Id, RegisteredObject>(
|
||||
override val originalRepo: KeyValuesRepo<Id, RegisteredObject>,
|
||||
protected val kvsRepo: KeyValuesRepo<Id, RegisteredObject>,
|
||||
scope: CoroutineScope,
|
||||
kvCache: KeyValueRepo<Id, List<RegisteredObject>> = MapKeyValueRepo(),
|
||||
recacheDelay: Long = 60.seconds.inWholeMilliseconds,
|
||||
actionWrapper: ActionWrapper = ActionWrapper.Direct
|
||||
) : AutoRecacheReadKeyValuesRepo<Id, RegisteredObject> (
|
||||
originalRepo,
|
||||
kvsRepo,
|
||||
scope,
|
||||
kvCache,
|
||||
recacheDelay,
|
||||
actionWrapper
|
||||
),
|
||||
WriteKeyValuesRepo<Id, RegisteredObject> by AutoRecacheWriteKeyValuesRepo(originalRepo, scope, kvCache),
|
||||
WriteKeyValuesRepo<Id, RegisteredObject> by AutoRecacheWriteKeyValuesRepo(kvsRepo, scope, kvCache),
|
||||
KeyValuesRepo<Id, RegisteredObject> {
|
||||
|
||||
constructor(
|
||||
|
@@ -24,7 +24,7 @@ import kotlinx.coroutines.launch
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
open class AutoRecacheReadKeyValuesRepo<Id, RegisteredObject>(
|
||||
protected open val originalRepo: ReadKeyValuesRepo<Id, RegisteredObject>,
|
||||
protected val originalRepo: ReadKeyValuesRepo<Id, RegisteredObject>,
|
||||
protected val scope: CoroutineScope,
|
||||
protected val kvCache: KeyValueRepo<Id, List<RegisteredObject>> = MapKeyValueRepo(),
|
||||
protected val recacheDelay: Long = 60.seconds.inWholeMilliseconds,
|
||||
|
@@ -15,10 +15,10 @@ import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
|
||||
open class FullReadCRUDCacheRepo<ObjectType, IdType>(
|
||||
protected open val parentRepo: ReadCRUDRepo<ObjectType, IdType>,
|
||||
protected open val kvCache: KeyValueRepo<IdType, ObjectType>,
|
||||
protected open val locker: SmartRWLocker = SmartRWLocker(),
|
||||
protected open val idGetter: (ObjectType) -> IdType
|
||||
protected val parentRepo: ReadCRUDRepo<ObjectType, IdType>,
|
||||
protected val kvCache: KeyValueRepo<IdType, ObjectType>,
|
||||
protected val locker: SmartRWLocker = SmartRWLocker(),
|
||||
protected val idGetter: (ObjectType) -> IdType
|
||||
) : ReadCRUDRepo<ObjectType, IdType>, FullCacheRepo {
|
||||
protected suspend inline fun <T> doOrTakeAndActualize(
|
||||
action: KeyValueRepo<IdType, ObjectType>.() -> Optional<T>,
|
||||
@@ -94,20 +94,20 @@ fun <ObjectType, IdType> ReadCRUDRepo<ObjectType, IdType>.cached(
|
||||
) = FullReadCRUDCacheRepo(this, kvCache, locker, idGetter)
|
||||
|
||||
open class FullCRUDCacheRepo<ObjectType, IdType, InputValueType>(
|
||||
override val parentRepo: CRUDRepo<ObjectType, IdType, InputValueType>,
|
||||
override val kvCache: KeyValueRepo<IdType, ObjectType>,
|
||||
protected val crudRepo: CRUDRepo<ObjectType, IdType, InputValueType>,
|
||||
kvCache: KeyValueRepo<IdType, ObjectType>,
|
||||
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
|
||||
skipStartInvalidate: Boolean = false,
|
||||
override val locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
|
||||
override val idGetter: (ObjectType) -> IdType
|
||||
locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
|
||||
idGetter: (ObjectType) -> IdType
|
||||
) : FullReadCRUDCacheRepo<ObjectType, IdType>(
|
||||
parentRepo,
|
||||
crudRepo,
|
||||
kvCache,
|
||||
locker,
|
||||
idGetter
|
||||
),
|
||||
WriteCRUDRepo<ObjectType, IdType, InputValueType> by WriteCRUDCacheRepo(
|
||||
parentRepo,
|
||||
crudRepo,
|
||||
kvCache,
|
||||
scope,
|
||||
locker,
|
||||
@@ -128,7 +128,7 @@ open class FullCRUDCacheRepo<ObjectType, IdType, InputValueType>(
|
||||
|
||||
protected open suspend fun initialInvalidate() {
|
||||
try {
|
||||
kvCache.actualizeAll(parentRepo, locker = null)
|
||||
kvCache.actualizeAll(crudRepo, locker = null)
|
||||
} finally {
|
||||
locker.unlockWrite()
|
||||
}
|
||||
|
@@ -15,9 +15,9 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.*

open class FullReadKeyValueCacheRepo<Key,Value>(
protected open val parentRepo: ReadKeyValueRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, Value>,
protected open val locker: SmartRWLocker = SmartRWLocker()
protected val parentRepo: ReadKeyValueRepo<Key, Value>,
protected val kvCache: KeyValueRepo<Key, Value>,
protected val locker: SmartRWLocker = SmartRWLocker()
) : ReadKeyValueRepo<Key, Value>, FullCacheRepo {
protected suspend inline fun <T> doOrTakeAndActualize(
action: KeyValueRepo<Key, Value>.() -> Optional<T>,
@@ -98,7 +98,7 @@ fun <Key, Value> ReadKeyValueRepo<Key, Value>.cached(

open class FullWriteKeyValueCacheRepo<Key,Value>(
parentRepo: WriteKeyValueRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, Value>,
protected val kvCache: KeyValueRepo<Key, Value>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
protected val locker: SmartRWLocker = SmartRWLocker()
) : WriteKeyValueRepo<Key, Value> by parentRepo, FullCacheRepo {
@@ -126,16 +126,16 @@ fun <Key, Value> WriteKeyValueRepo<Key, Value>.caching(
) = FullWriteKeyValueCacheRepo(this, kvCache, scope)

open class FullKeyValueCacheRepo<Key,Value>(
override val parentRepo: KeyValueRepo<Key, Value>,
override val kvCache: KeyValueRepo<Key, Value>,
protected val kvRepo: KeyValueRepo<Key, Value>,
kvCache: KeyValueRepo<Key, Value>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
skipStartInvalidate: Boolean = false,
override val locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
) : //FullWriteKeyValueCacheRepo<Key,Value>(parentRepo, kvCache, scope),
KeyValueRepo<Key,Value>,
WriteKeyValueRepo<Key,Value> by parentRepo,
WriteKeyValueRepo<Key,Value> by kvRepo,
FullReadKeyValueCacheRepo<Key, Value>(
parentRepo,
kvRepo,
kvCache,
locker
) {
@@ -151,7 +151,7 @@ open class FullKeyValueCacheRepo<Key,Value>(
}
}

override suspend fun unsetWithValues(toUnset: List<Value>) = parentRepo.unsetWithValues(toUnset)
override suspend fun unsetWithValues(toUnset: List<Value>) = kvRepo.unsetWithValues(toUnset)

protected open suspend fun initialInvalidate() {
try {
@@ -165,13 +165,13 @@ open class FullKeyValueCacheRepo<Key,Value>(
}

override suspend fun clear() {
parentRepo.clear()
kvRepo.clear()
kvCache.clear()
}

override suspend fun set(toSet: Map<Key, Value>) {
locker.withWriteLock {
parentRepo.set(toSet)
kvRepo.set(toSet)
kvCache.set(
toSet.filter {
parentRepo.contains(it.key)
@@ -182,7 +182,7 @@ open class FullKeyValueCacheRepo<Key,Value>(

override suspend fun unset(toUnset: List<Key>) {
locker.withWriteLock {
parentRepo.unset(toUnset)
kvRepo.unset(toUnset)
kvCache.unset(
toUnset.filter {
!parentRepo.contains(it)
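Below is a minimal usage sketch of the reworked `FullKeyValueCacheRepo` constructor shown in the hunk above, where the wrapped repo is passed as the plain `kvRepo` parameter instead of an `override val parentRepo`. The `MapKeyValueRepo` instances and the import paths are assumptions for illustration only; they are not part of this diff.

```kotlin
import dev.inmo.micro_utils.repos.KeyValueRepo
import dev.inmo.micro_utils.repos.MapKeyValueRepo
import dev.inmo.micro_utils.repos.cache.full.FullKeyValueCacheRepo
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers

// Assumed setup: MapKeyValueRepo stands in for a real persistent KeyValueRepo
// and for the in-memory cache layer.
val persistentRepo: KeyValueRepo<String, Int> = MapKeyValueRepo()
val cacheLayer: KeyValueRepo<String, Int> = MapKeyValueRepo()

// Mirrors the constructor after this change: writes are delegated to kvRepo
// (WriteKeyValueRepo by kvRepo) while reads go through the
// FullReadKeyValueCacheRepo base backed by kvCache.
val cachedRepo = FullKeyValueCacheRepo(
    persistentRepo, // kvRepo: the repo being wrapped
    cacheLayer,     // kvCache: cache used by the read side
    scope = CoroutineScope(Dispatchers.Default),
    skipStartInvalidate = false
)
```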
@@ -16,9 +16,9 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.*

open class FullReadKeyValuesCacheRepo<Key,Value>(
protected open val parentRepo: ReadKeyValuesRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, List<Value>>,
protected open val locker: SmartRWLocker = SmartRWLocker(),
protected val parentRepo: ReadKeyValuesRepo<Key, Value>,
protected val kvCache: KeyValueRepo<Key, List<Value>>,
protected val locker: SmartRWLocker = SmartRWLocker(),
) : ReadKeyValuesRepo<Key, Value>, FullCacheRepo {
protected suspend inline fun <T> doOrTakeAndActualize(
action: KeyValueRepo<Key, List<Value>>.() -> Optional<T>,
@@ -165,7 +165,7 @@ fun <Key, Value> ReadKeyValuesRepo<Key, Value>.cached(

open class FullWriteKeyValuesCacheRepo<Key,Value>(
parentRepo: WriteKeyValuesRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, List<Value>>,
protected val kvCache: KeyValueRepo<Key, List<Value>>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
protected val locker: SmartRWLocker = SmartRWLocker(),
) : WriteKeyValuesRepo<Key, Value> by parentRepo, FullCacheRepo {
@@ -200,14 +200,14 @@ fun <Key, Value> WriteKeyValuesRepo<Key, Value>.caching(
) = FullWriteKeyValuesCacheRepo(this, kvCache, scope, locker)

open class FullKeyValuesCacheRepo<Key,Value>(
override val parentRepo: KeyValuesRepo<Key, Value>,
override val kvCache: KeyValueRepo<Key, List<Value>>,
protected val kvsRepo: KeyValuesRepo<Key, Value>,
kvCache: KeyValueRepo<Key, List<Value>>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
skipStartInvalidate: Boolean = false,
override val locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
) : KeyValuesRepo<Key, Value>,
FullReadKeyValuesCacheRepo<Key, Value>(parentRepo, kvCache, locker),
WriteKeyValuesRepo<Key, Value> by parentRepo {
FullReadKeyValuesCacheRepo<Key, Value>(kvsRepo, kvCache, locker),
WriteKeyValuesRepo<Key, Value> by kvsRepo {
init {
if (!skipStartInvalidate) {
scope.launchLoggingDropExceptions {
@@ -241,7 +241,7 @@ open class FullKeyValuesCacheRepo<Key,Value>(

override suspend fun set(toSet: Map<Key, List<Value>>) {
locker.withWriteLock {
parentRepo.set(toSet)
kvsRepo.set(toSet)
kvCache.set(
toSet.filter {
parentRepo.contains(it.key)
@@ -252,7 +252,7 @@ open class FullKeyValuesCacheRepo<Key,Value>(

override suspend fun add(toAdd: Map<Key, List<Value>>) {
locker.withWriteLock {
parentRepo.add(toAdd)
kvsRepo.add(toAdd)
toAdd.forEach {
val filtered = it.value.filter { v ->
parentRepo.contains(it.key, v)
@@ -269,7 +269,7 @@ open class FullKeyValuesCacheRepo<Key,Value>(

override suspend fun remove(toRemove: Map<Key, List<Value>>) {
locker.withWriteLock {
parentRepo.remove(toRemove)
kvsRepo.remove(toRemove)
toRemove.forEach {
val filtered = it.value.filter { v ->
!parentRepo.contains(it.key, v)
@@ -291,7 +291,7 @@ open class FullKeyValuesCacheRepo<Key,Value>(

override suspend fun clear(k: Key) {
locker.withWriteLock {
parentRepo.clear(k)
kvsRepo.clear(k)
if (parentRepo.contains(k)) {
return@withWriteLock
}
@@ -1,24 +1,21 @@
package dev.inmo.micro_utils.repos.cache.full.direct

import dev.inmo.micro_utils.common.*
import dev.inmo.micro_utils.coroutines.SmartRWLocker
import dev.inmo.micro_utils.coroutines.launchLoggingDropExceptions
import dev.inmo.micro_utils.coroutines.withReadAcquire
import dev.inmo.micro_utils.coroutines.withWriteLock
import dev.inmo.micro_utils.pagination.Pagination
import dev.inmo.micro_utils.pagination.PaginationResult
import dev.inmo.micro_utils.repos.*
import dev.inmo.micro_utils.repos.cache.*
import dev.inmo.micro_utils.repos.cache.util.ActualizeAllClearMode
import dev.inmo.micro_utils.repos.cache.util.actualizeAll
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers

open class DirectFullReadCRUDCacheRepo<ObjectType, IdType>(
protected open val parentRepo: ReadCRUDRepo<ObjectType, IdType>,
protected open val kvCache: KeyValueRepo<IdType, ObjectType>,
protected open val locker: SmartRWLocker = SmartRWLocker(),
protected open val idGetter: (ObjectType) -> IdType
protected val parentRepo: ReadCRUDRepo<ObjectType, IdType>,
protected val kvCache: KeyValueRepo<IdType, ObjectType>,
protected val locker: SmartRWLocker = SmartRWLocker(),
protected val idGetter: (ObjectType) -> IdType
) : ReadCRUDRepo<ObjectType, IdType>, DirectFullCacheRepo {
protected open suspend fun actualizeAll() {
kvCache.actualizeAll(parentRepo, locker = locker)
@@ -60,20 +57,20 @@ fun <ObjectType, IdType> ReadCRUDRepo<ObjectType, IdType>.directlyCached(
) = DirectFullReadCRUDCacheRepo(this, kvCache, locker, idGetter)

open class DirectFullCRUDCacheRepo<ObjectType, IdType, InputValueType>(
override val parentRepo: CRUDRepo<ObjectType, IdType, InputValueType>,
override val kvCache: KeyValueRepo<IdType, ObjectType>,
protected val crudRepo: CRUDRepo<ObjectType, IdType, InputValueType>,
kvCache: KeyValueRepo<IdType, ObjectType>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
skipStartInvalidate: Boolean = false,
override val locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
idGetter: (ObjectType) -> IdType
) : DirectFullReadCRUDCacheRepo<ObjectType, IdType>(
parentRepo,
crudRepo,
kvCache,
locker,
idGetter
),
WriteCRUDRepo<ObjectType, IdType, InputValueType> by WriteCRUDCacheRepo(
parentRepo,
crudRepo,
kvCache,
scope,
locker,
@@ -18,9 +18,9 @@ import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach

open class DirectFullReadKeyValueCacheRepo<Key, Value>(
protected open val parentRepo: ReadKeyValueRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, Value>,
protected open val locker: SmartRWLocker = SmartRWLocker()
protected val parentRepo: ReadKeyValueRepo<Key, Value>,
protected val kvCache: KeyValueRepo<Key, Value>,
protected val locker: SmartRWLocker = SmartRWLocker()
) : DirectFullCacheRepo, ReadKeyValueRepo<Key, Value> {
protected open suspend fun actualizeAll() {
kvCache.actualizeAll(parentRepo, locker)
@@ -65,8 +65,8 @@ fun <Key, Value> ReadKeyValueRepo<Key, Value>.directlyCached(
) = DirectFullReadKeyValueCacheRepo(this, kvCache, locker)

open class DirectFullWriteKeyValueCacheRepo<Key, Value>(
protected open val parentRepo: WriteKeyValueRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, Value>,
protected val parentRepo: WriteKeyValueRepo<Key, Value>,
protected val kvCache: KeyValueRepo<Key, Value>,
protected val locker: SmartRWLocker = SmartRWLocker(),
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
) : DirectFullCacheRepo, WriteKeyValueRepo<Key, Value> by parentRepo {
@@ -101,20 +101,20 @@ fun <Key, Value> WriteKeyValueRepo<Key, Value>.directlyCached(
) = DirectFullWriteKeyValueCacheRepo(this, kvCache, scope = scope)

open class DirectFullKeyValueCacheRepo<Key, Value>(
override val parentRepo: KeyValueRepo<Key, Value>,
override val kvCache: KeyValueRepo<Key, Value>,
protected val kvRepo: KeyValueRepo<Key, Value>,
kvCache: KeyValueRepo<Key, Value>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
skipStartInvalidate: Boolean = false,
override val locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
) : DirectFullCacheRepo,
KeyValueRepo<Key, Value> ,
WriteKeyValueRepo<Key, Value> by DirectFullWriteKeyValueCacheRepo(
parentRepo,
kvRepo,
kvCache,
locker,
scope
),
DirectFullReadKeyValueCacheRepo<Key, Value>(parentRepo, kvCache, locker) {
DirectFullReadKeyValueCacheRepo<Key, Value>(kvRepo, kvCache, locker) {
init {
if (!skipStartInvalidate) {
scope.launchLoggingDropExceptions {
@@ -140,15 +140,15 @@ open class DirectFullKeyValueCacheRepo<Key, Value>(
}

override suspend fun clear() {
parentRepo.clear()
kvRepo.clear()
kvCache.clear()
}

override suspend fun unsetWithValues(toUnset: List<Value>) = parentRepo.unsetWithValues(toUnset)
override suspend fun unsetWithValues(toUnset: List<Value>) = kvRepo.unsetWithValues(toUnset)

override suspend fun set(toSet: Map<Key, Value>) {
locker.withWriteLock {
parentRepo.set(toSet)
kvRepo.set(toSet)
kvCache.set(
toSet.filter {
parentRepo.contains(it.key)
@@ -159,7 +159,7 @@ open class DirectFullKeyValueCacheRepo<Key, Value>(

override suspend fun unset(toUnset: List<Key>) {
locker.withWriteLock {
parentRepo.unset(toUnset)
kvRepo.unset(toUnset)
kvCache.unset(
toUnset.filter {
!parentRepo.contains(it)
@@ -15,9 +15,9 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.*

open class DirectFullReadKeyValuesCacheRepo<Key,Value>(
protected open val parentRepo: ReadKeyValuesRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, List<Value>>,
protected open val locker: SmartRWLocker = SmartRWLocker(),
protected val parentRepo: ReadKeyValuesRepo<Key, Value>,
protected val kvCache: KeyValueRepo<Key, List<Value>>,
protected val locker: SmartRWLocker = SmartRWLocker(),
) : ReadKeyValuesRepo<Key, Value>, DirectFullCacheRepo {
protected open suspend fun actualizeKey(k: Key) {
kvCache.actualizeAll(locker = locker, clearMode = ActualizeAllClearMode.Never) {
@@ -100,7 +100,7 @@ fun <Key, Value> ReadKeyValuesRepo<Key, Value>.directlyCached(

open class DirectFullWriteKeyValuesCacheRepo<Key,Value>(
parentRepo: WriteKeyValuesRepo<Key, Value>,
protected open val kvCache: KeyValueRepo<Key, List<Value>>,
protected val kvCache: KeyValueRepo<Key, List<Value>>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
protected val locker: SmartRWLocker = SmartRWLocker(),
) : WriteKeyValuesRepo<Key, Value> by parentRepo, DirectFullCacheRepo {
@@ -135,14 +135,14 @@ fun <Key, Value> WriteKeyValuesRepo<Key, Value>.directlyCached(
) = DirectFullWriteKeyValuesCacheRepo(this, kvCache, scope, locker)

open class DirectFullKeyValuesCacheRepo<Key,Value>(
override val parentRepo: KeyValuesRepo<Key, Value>,
override val kvCache: KeyValueRepo<Key, List<Value>>,
protected val kvsRepo: KeyValuesRepo<Key, Value>,
kvCache: KeyValueRepo<Key, List<Value>>,
scope: CoroutineScope = CoroutineScope(Dispatchers.Default),
skipStartInvalidate: Boolean = false,
override val locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
locker: SmartRWLocker = SmartRWLocker(writeIsLocked = !skipStartInvalidate),
) : KeyValuesRepo<Key, Value>,
DirectFullReadKeyValuesCacheRepo<Key, Value>(parentRepo, kvCache, locker),
WriteKeyValuesRepo<Key, Value> by parentRepo {
DirectFullReadKeyValuesCacheRepo<Key, Value>(kvsRepo, kvCache, locker),
WriteKeyValuesRepo<Key, Value> by kvsRepo {
init {
if (!skipStartInvalidate) {
scope.launchLoggingDropExceptions {
@@ -176,7 +176,7 @@ open class DirectFullKeyValuesCacheRepo<Key,Value>(

override suspend fun set(toSet: Map<Key, List<Value>>) {
locker.withWriteLock {
parentRepo.set(toSet)
kvsRepo.set(toSet)
kvCache.set(
toSet.filter {
parentRepo.contains(it.key)
@@ -187,7 +187,7 @@ open class DirectFullKeyValuesCacheRepo<Key,Value>(

override suspend fun add(toAdd: Map<Key, List<Value>>) {
locker.withWriteLock {
parentRepo.add(toAdd)
kvsRepo.add(toAdd)
toAdd.forEach {
val filtered = it.value.filter { v ->
parentRepo.contains(it.key, v)
@@ -204,7 +204,7 @@ open class DirectFullKeyValuesCacheRepo<Key,Value>(

override suspend fun remove(toRemove: Map<Key, List<Value>>) {
locker.withWriteLock {
parentRepo.remove(toRemove)
kvsRepo.remove(toRemove)
toRemove.forEach {
val filtered = it.value.filter { v ->
!parentRepo.contains(it.key, v)
@@ -226,7 +226,7 @@ open class DirectFullKeyValuesCacheRepo<Key,Value>(

override suspend fun clear(k: Key) {
locker.withWriteLock {
parentRepo.clear(k)
kvsRepo.clear(k)
if (parentRepo.contains(k)) {
return@withWriteLock
}
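For the key-values variant above, here is a similarly hedged sketch of constructing `DirectFullKeyValuesCacheRepo` with the reworked `kvsRepo`/`kvCache` parameters. `MapKeyValuesRepo`, `MapKeyValueRepo`, and the import paths are assumed names for in-memory repos used only for illustration; they are not part of this diff.

```kotlin
import dev.inmo.micro_utils.repos.KeyValueRepo
import dev.inmo.micro_utils.repos.KeyValuesRepo
import dev.inmo.micro_utils.repos.MapKeyValueRepo
import dev.inmo.micro_utils.repos.MapKeyValuesRepo
import dev.inmo.micro_utils.repos.cache.full.direct.DirectFullKeyValuesCacheRepo

// Assumed setup: in-memory repos stand in for real storage.
val tagsRepo: KeyValuesRepo<String, String> = MapKeyValuesRepo()
val tagsCache: KeyValueRepo<String, List<String>> = MapKeyValueRepo()

// Mirrors the constructor after this change: the wrapped repo is the plain
// kvsRepo parameter; writes are delegated to it (WriteKeyValuesRepo by kvsRepo)
// and reads go through the DirectFullReadKeyValuesCacheRepo base backed by kvCache.
val cachedTags = DirectFullKeyValuesCacheRepo(
    tagsRepo,  // kvsRepo: repo being wrapped
    tagsCache  // kvCache: per-key cached lists
)
```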