apply ktlint to storage.lib as well
commit b8ac11e724 (parent d6bd36ad8b)
12 changed files with 92 additions and 52 deletions
@@ -148,16 +148,7 @@ dependencies {
     androidTestImplementation "io.mockk:mockk-android:$mockk_version"
 }
 
-ktlint {
-    version = "0.36.0" // https://github.com/pinterest/ktlint/issues/764
-    android = true
-    enableExperimentalRules = false
-    verbose = true
-    disabledRules = [
-        "import-ordering",
-        "no-blank-line-before-rbrace",
-    ]
-}
+apply from: "${rootProject.rootDir}/gradle/ktlint.gradle"
 
 gradle.projectsEvaluated {
     tasks.withType(JavaCompile) {
gradle/ktlint.gradle (new file, 11 lines added)
@@ -0,0 +1,11 @@
+ktlint {
+    version = "0.40.0"
+    android = true
+    enableExperimentalRules = false
+    verbose = true
+    disabledRules = [
+        "import-ordering",
+        "no-blank-line-before-rbrace",
+        "indent", // remove in 0.41 https://github.com/pinterest/ktlint/issues/764
+    ]
+}
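Below is a minimal, hypothetical Kotlin sketch (not part of this commit) of what the two rules disabled above would otherwise report; the imports and class are made up for illustration. The "indent" rule, by contrast, is only disabled temporarily, per the linked ktlint issue.

// Hypothetical example only; not taken from this repository.
import java.util.Locale // "import-ordering" normally wants java.io.File sorted first; the rule is disabled here
import java.io.File

internal class Example {
    fun describe(file: File): String = file.name.lowercase(Locale.ROOT)

} // "no-blank-line-before-rbrace" would flag the blank line just above this brace; also disabled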
@@ -3,6 +3,7 @@ plugins {
     id 'com.google.protobuf'
     id 'kotlin-android'
     id 'kotlin-kapt'
+    id "org.jlleitschuh.gradle.ktlint" version "9.4.0"
     id 'org.jetbrains.dokka' version '1.4.20'
 }
 
@@ -90,3 +91,5 @@ dependencies {
     androidTestImplementation 'androidx.test.ext:junit:1.1.2'
     androidTestImplementation "androidx.test.espresso:espresso-core:$espresso_version"
 }
+
+apply from: "${rootProject.rootDir}/gradle/ktlint.gradle"
@@ -15,7 +15,6 @@ internal data class Chunk(
     fun toCachedChunk() = CachedChunk(id, 0, size)
 }
 
-
 internal class Chunker(
     private val mac: Mac,
     private val chunkSizeMax: Int,
@@ -6,7 +6,6 @@ import org.calyxos.backup.storage.api.BackupFile
 import org.calyxos.backup.storage.api.BackupObserver
 import org.calyxos.backup.storage.ui.Notifications
 
-
 public open class NotificationBackupObserver internal constructor(private val n: Notifications) :
     BackupObserver {
 
@@ -33,7 +33,9 @@ internal object Hkdf {
         }
 
         val iterations = ceil(outLengthBytes.toDouble() / hmacHasher.macLength).toInt()
-        require(iterations <= 255) { "out length must be maximal 255 * hash-length; requested: $outLengthBytes bytes" }
+        require(iterations <= 255) {
+            "out length must be maximal 255 * hash-length; requested: $outLengthBytes bytes"
+        }
 
         val buffer: ByteBuffer = ByteBuffer.allocate(outLengthBytes)
         var blockN = ByteArray(0)
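This rewrap of the require(...) call is the usual fix for ktlint's max-line-length check (with android = true the Android style applies, which presumably caps lines at 100 columns). A hypothetical sketch of the pattern, with made-up names and message:

// Hypothetical example only; names and message are made up.
fun check(valid: Boolean) {
    // One-liner form that ktlint would flag once it grows past the column limit:
    // require(valid) { "a very long error message that pushes the statement past the allowed line length" }
    // Wrapped form, as applied throughout this commit:
    require(valid) {
        "a very long error message that pushes the statement past the allowed line length"
    }
}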
@@ -174,8 +174,9 @@ public object DocumentFileExt {
                 Log.d(TAG, "Children loaded. Continue...")
                 cursor.close()
                 val newCursor = query()
-                if (newCursor == null) cont.cancel(IOException("query returned no results"))
-                else cont.resume(newCursor)
+                if (newCursor == null) {
+                    cont.cancel(IOException("query returned no results"))
+                } else cont.resume(newCursor)
             }
         })
     } else {
@@ -36,7 +36,7 @@ public abstract class SafStoragePlugin(
     private val snapshotFiles = HashMap<Long, DocumentFile>()
 
     private fun timestampToSnapshot(timestamp: Long): String {
-        return "${timestamp}.SeedSnap"
+        return "$timestamp.SeedSnap"
     }
 
     @Throws(IOException::class)
@@ -97,7 +97,9 @@ public abstract class SafStoragePlugin(
                 chunkFolders[chunkFolderName] = file
                 Log.d(TAG, "Created missing folder $chunkFolderName (${i + 1}/$s)")
             }
-            if (chunkFolders.size != 256) throw IOException("Only have ${chunkFolders.size} chunk folders.")
+            if (chunkFolders.size != 256) {
+                throw IOException("Only have ${chunkFolders.size} chunk folders.")
+            }
         }
         if (s > 0) Log.e(TAG, "Creating $s missing chunk folders took $duration")
     }
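The change from "${timestamp}" to "$timestamp" above, like the similar edits in the files that follow, matches ktlint's string-template rule, which treats curly braces around a simple variable reference as redundant. A hypothetical sketch:

// Hypothetical example only.
fun snapshotName(timestamp: Long): String {
    // val name = "${timestamp}.SeedSnap" // string-template: redundant curly braces
    val name = "$timestamp.SeedSnap" // braces are only needed for expressions, e.g. "${timestamp + 1}"
    return name
}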
@@ -67,7 +67,9 @@ internal class FileRestore(
         // ensure directory exists
         @Suppress("DEPRECATION")
         val dir = File("${getExternalStorageDirectory()}/${docFile.dir}")
-        if (!dir.mkdirs() && !dir.isDirectory) throw IOException("Could not create ${dir.absolutePath}")
+        if (!dir.mkdirs() && !dir.isDirectory) {
+            throw IOException("Could not create ${dir.absolutePath}")
+        }
         // find non-existing file-name
         var file = File(dir, docFile.name)
         var i = 0
@@ -145,14 +147,12 @@ internal class FileRestore(
         return bytesWritten
     }
 
-    private fun setLastModifiedOnMediaFile(
-        mediaFile: BackupMediaFile, uri: Uri
-    ) {
+    private fun setLastModifiedOnMediaFile(mediaFile: BackupMediaFile, uri: Uri) {
         @Suppress("DEPRECATION")
         val extDir = getExternalStorageDirectory()
 
         // re-set lastModified as we can't use the MediaStore for this (read-only property)
-        val path = "${extDir}/${mediaFile.path}/${mediaFile.name}"
+        val path = "$extDir/${mediaFile.path}/${mediaFile.name}"
         val file = File(path)
         if (file.isFile) {
             file.setLastModified(mediaFile.lastModified)
@@ -163,9 +163,9 @@ internal class FileRestore(
             // so try to find it in MediaStore
             val relPath = mediaScanner.getPath(uri)
             if (relPath == null) {
-                Log.w(TAG, "Did not find $path with $uri after inserting, can't set lastModified")
+                Log.w(TAG, "Did not find $path with $uri, can't set lastModified")
             } else {
-                val newPath = "${extDir}/$relPath"
+                val newPath = "$extDir/$relPath"
                 val newFile = File(newPath)
                 Log.w(TAG, "WARNING: ${mediaFile.name} is now ${newFile.path}")
                 if (newFile.isFile) {
@@ -60,7 +60,7 @@ public class MediaScanner(context: Context) {
         val extras = Bundle().apply {
             val query = StringBuilder()
             if (SDK_INT >= 30 && uri != MediaType.Downloads.contentUri) {
-                query.append("${IS_DOWNLOAD}=0")
+                query.append("$IS_DOWNLOAD=0")
             }
             extraQuery?.let {
                 if (query.isNotEmpty()) query.append(" AND ")
@@ -98,7 +98,9 @@ public class MediaScanner(context: Context) {
                    PROJECTION_GENERATION_MODIFIED
                ) else null,
                size = cursor.getLong(PROJECTION_SIZE),
-               isFavorite = if (SDK_INT >= 30) cursor.getIntOrNull(PROJECTION_IS_FAVORITE) == 1 else false,
+               isFavorite = if (SDK_INT >= 30) {
+                   cursor.getIntOrNull(PROJECTION_IS_FAVORITE) == 1
+               } else false,
                ownerPackageName = cursor.getStringOrNull(PROJECTION_OWNER_PACKAGE_NAME),
                volume = cursor.getString(PROJECTION_VOLUME_NAME)
            )
@@ -112,7 +114,7 @@ public class MediaScanner(context: Context) {
     private fun getRealSize(mediaFile: MediaFile): Long {
         @Suppress("DEPRECATION")
         val extDir = Environment.getExternalStorageDirectory()
-        val path = "${extDir}/${mediaFile.dirPath}/${mediaFile.fileName}"
+        val path = "$extDir/${mediaFile.dirPath}/${mediaFile.fileName}"
         return try {
             File(path).length()
         } catch (e: Exception) {
@@ -53,7 +53,6 @@ import java.io.OutputStream
 import javax.crypto.spec.SecretKeySpec
 import kotlin.random.Random
 
-
 @Suppress("BlockingMethodInNonBlockingContext")
 internal class BackupRestoreTest {
 
@@ -261,10 +260,18 @@ internal class BackupRestoreTest {
 
         // first 3 chunks are not cached on 1st invocation, but afterwards. Last chunk never cached
         // also needed to ensure that we don't write chunks more than once into the same stream
-        expectCacheMissAndThenHit("040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
-        expectCacheMissAndThenHit("901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
-        expectCacheMissAndThenHit("5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
-        every { chunksCache.get("40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67") } returns null
+        expectCacheMissAndThenHit(
+            "040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3"
+        )
+        expectCacheMissAndThenHit(
+            "901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29"
+        )
+        expectCacheMissAndThenHit(
+            "5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d"
+        )
+        every {
+            chunksCache.get("40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
+        } returns null
 
         // file input streams
         // don't return the same stream twice here, as we don't reset it, but read it twice
@@ -282,19 +289,27 @@ internal class BackupRestoreTest {
         // output streams for deterministic chunks
         val id040f32 = ByteArrayOutputStream()
         every {
-            plugin.getChunkOutputStream("040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
+            plugin.getChunkOutputStream(
+                "040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3"
+            )
         } returns id040f32
         val id901fbc = ByteArrayOutputStream()
         every {
-            plugin.getChunkOutputStream("901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
+            plugin.getChunkOutputStream(
+                "901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29"
+            )
         } returns id901fbc
         val id5adea3 = ByteArrayOutputStream()
         every {
-            plugin.getChunkOutputStream("5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
+            plugin.getChunkOutputStream(
+                "5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d"
+            )
         } returns id5adea3
         val id40d00c = ByteArrayOutputStream()
         every {
-            plugin.getChunkOutputStream("40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
+            plugin.getChunkOutputStream(
+                "40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67"
+            )
         } returns id40d00c
 
         every { chunksCache.insert(any<CachedChunk>()) } just Runs
@@ -310,10 +325,14 @@ internal class BackupRestoreTest {
 
         // chunks were only written to storage once
         verify(exactly = 1) {
-            plugin.getChunkOutputStream("040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
-            plugin.getChunkOutputStream("901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
-            plugin.getChunkOutputStream("5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
-            plugin.getChunkOutputStream("40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
+            plugin.getChunkOutputStream(
+                "040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
+            plugin.getChunkOutputStream(
+                "901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
+            plugin.getChunkOutputStream(
+                "5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
+            plugin.getChunkOutputStream(
+                "40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
         }
 
         // RESTORE
@@ -334,16 +353,20 @@ internal class BackupRestoreTest {
 
         // pipe chunks back in
         coEvery {
-            plugin.getChunkInputStream("040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
+            plugin.getChunkInputStream(
+                "040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
         } returns ByteArrayInputStream(id040f32.toByteArray())
         coEvery {
-            plugin.getChunkInputStream("901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
+            plugin.getChunkInputStream(
+                "901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
         } returns ByteArrayInputStream(id901fbc.toByteArray())
         coEvery {
-            plugin.getChunkInputStream("5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
+            plugin.getChunkInputStream(
+                "5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
         } returns ByteArrayInputStream(id5adea3.toByteArray())
         coEvery {
-            plugin.getChunkInputStream("40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
+            plugin.getChunkInputStream(
+                "40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
         } returns ByteArrayInputStream(id40d00c.toByteArray())
 
         // provide file output streams for restore
@@ -360,10 +383,14 @@ internal class BackupRestoreTest {
 
         // chunks were only read from storage once
         coVerify(exactly = 1) {
-            plugin.getChunkInputStream("040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
-            plugin.getChunkInputStream("901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
-            plugin.getChunkInputStream("5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
-            plugin.getChunkInputStream("40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
+            plugin.getChunkInputStream(
+                "040f3204869543c4015d92c04bf875b25ebde55f9645380f4172aa439b2825d3")
+            plugin.getChunkInputStream(
+                "901fbcf9a94271fc0455d0052522cab994f9392d0bb85187860282b4beadfb29")
+            plugin.getChunkInputStream(
+                "5adea3149fe6cf9c6e3270a52ee2c31bc9dfcef5f2080b583a4dd3b779c9182d")
+            plugin.getChunkInputStream(
+                "40d00c1be4b0f89e8b12d47f3658aa42f568a8d02b978260da6d0050e7007e67")
         }
     }
 
@@ -21,11 +21,14 @@ internal class HkdfTest {
     fun rfc5869testCase2() {
         checkStep2(
             "06a6b88c5853361a06104c9ceb35b45cef760014904671014a193f40c15fc244",
-            "b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9" +
+            "b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4" +
+                "c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9" +
                 "dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
             82,
-            "b11e398dc80327a1c8e7f78c596a49344f012eda2d4efad8a050cc4c19afa97c59045a99cac7827271c" +
-                "b41c65e590e09da3275600c2f09b8367793a9aca3db71cc30c58179ec3e87c14c01d5c1f3434f1d87"
+            "b11e398dc80327a1c8e7f78c596a49344f012eda2" +
+                "d4efad8a050cc4c19afa97c59045a99cac7827271c" +
+                "b41c65e590e09da3275600c2f09b8367793a9aca3db" +
+                "71cc30c58179ec3e87c14c01d5c1f3434f1d87"
         )
     }
 