Fix flakiness in SmallFileBackupIntegrationTest again

Since the chunk gets zipped, its compressed size can vary: when the random input data wasn't truly random, it contained less entropy and could be compressed more, so the size the test asserted on would sometimes differ.
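
For context, a minimal, self-contained sketch (not part of the commit; names and sizes are illustrative) of why a fixed expected size is fragile: the deflated size of a chunk tracks the entropy of its input, so low-entropy "random" data compresses further than truly random bytes.

    import java.io.ByteArrayOutputStream
    import java.util.zip.DeflaterOutputStream
    import kotlin.random.Random

    // Deflate the given bytes and return the compressed size.
    fun compressedSize(data: ByteArray): Int {
        val out = ByteArrayOutputStream()
        DeflaterOutputStream(out).use { it.write(data) }
        return out.size()
    }

    fun main() {
        val highEntropy = Random.nextBytes(128)          // truly random: barely compresses
        val lowEntropy = ByteArray(128) { 0x42.toByte() } // low entropy: compresses well
        println(compressedSize(highEntropy))              // roughly 140 bytes
        println(compressedSize(lowEntropy))               // roughly 15 bytes
    }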
Torsten Grote 2024-04-05 08:45:52 -03:00
parent fef6ecc640
commit 2bfbd1a2e4


@@ -105,7 +105,9 @@ internal class SmallFileBackupIntegrationTest {
                 it.copy(lastSeen = cachedFile2.lastSeen) == cachedFile2
             })
         } just Runs
-        coEvery { observer.onFileBackedUp(file2, true, 0, 181, "S") } just Runs
+        coEvery {
+            observer.onFileBackedUp(file2, true, 0, match<Long> { it <= outputStream2.size() }, "S")
+        } just Runs

         val result = smallFileBackup.backupFiles(files, availableChunkIds, observer)
         assertEquals(setOf(chunkId.toHexString()), result.chunkIds)
@@ -114,7 +114,7 @@ internal class SmallFileBackupIntegrationTest {
         assertEquals(0, result.backupMediaFiles.size)

         coVerify {
-            observer.onFileBackedUp(file2, true, 0, 181, "S")
+            observer.onFileBackedUp(file2, true, 0, match<Long> { it <= outputStream2.size() }, "S")
         }
     }
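
The fix replaces the hard-coded 181-byte expectation with MockK's match matcher and an upper bound: whatever size the compressor produces, the value reported to the observer cannot exceed the number of bytes actually written to outputStream2, so the stub and the verification hold regardless of how compressible the random test data happens to be.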