Fix flakiness in SmallFileBackupIntegrationTest again

Since the chunk gets zipped, the compressed size of the random input data would sometimes differ: if the randomness wasn't truly random, the data could be compressed more.
This commit is contained in:
Torsten Grote 2024-04-05 08:45:52 -03:00 committed by Chirayu Desai
parent fef6ecc640
commit 9557dfd4e7

View file

@ -105,7 +105,9 @@ internal class SmallFileBackupIntegrationTest {
it.copy(lastSeen = cachedFile2.lastSeen) == cachedFile2
})
} just Runs
coEvery { observer.onFileBackedUp(file2, true, 0, 181, "S") } just Runs
coEvery {
observer.onFileBackedUp(file2, true, 0, match<Long> { it <= outputStream2.size() }, "S")
} just Runs
val result = smallFileBackup.backupFiles(files, availableChunkIds, observer)
assertEquals(setOf(chunkId.toHexString()), result.chunkIds)
@ -114,7 +116,7 @@ internal class SmallFileBackupIntegrationTest {
assertEquals(0, result.backupMediaFiles.size)
coVerify {
observer.onFileBackedUp(file2, true, 0, 181, "S")
observer.onFileBackedUp(file2, true, 0, match<Long> { it <= outputStream2.size() }, "S")
}
}