path: root/core/commonMain/src/kotlinx/serialization/encoding/ChunkedDecoder.kt
blob: 016e07e20f4de1817dffe24af8e47473429c45ab
package kotlinx.serialization.encoding

import kotlinx.serialization.ExperimentalSerializationApi

/**
 * Indicates that the decoder supports consuming large strings in chunks via the [decodeStringChunked] method.
 * Currently, only the streaming JSON decoder implements this interface.
 * Note that this interface is applicable only to streaming decoders; features that go through the
 * tree-based JsonTreeDecoder, such as polymorphism, cannot be used together with it.
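 *
 * For instance, an illustrative sketch (using `LargeStringData` from the [decodeStringChunked] example;
 * the entry points shown are assumed to dispatch to the streaming and tree-based decoders respectively):
 * ```
 * // Works: Json.decodeFromString and Json.decodeFromStream use the streaming decoder
 * val data = Json.decodeFromString<LargeStringData>(jsonText)
 *
 * // Not supported: decoding an already-parsed JsonElement goes through the tree-based decoder,
 * // which does not implement ChunkedDecoder
 * val fromTree = Json.decodeFromJsonElement<LargeStringData>(element)
 * ```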
 */
@ExperimentalSerializationApi
public interface ChunkedDecoder {
    /**
     * Decodes a string value in fixed-size chunks and feeds the chunks to the provided consumer.
     * This is useful for handling very large strings that may not fit in memory.
     * Each chunk is guaranteed to contain at most 16384 characters, but it may be smaller.
     *
     * @param consumeChunk lambda that is invoked with each decoded string chunk
     *
     * Example usage:
     * ```
     * @Serializable(with = LargeStringSerializer::class)
     * data class LargeStringData(val largeString: String)
     *
     * @Serializable
     * data class ClassWithLargeStringDataField(val largeStringField: LargeStringData)
     *
     * object LargeStringSerializer : KSerializer<LargeStringData> {
     *     override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("LargeStringContent", PrimitiveKind.STRING)
     *
     *     override fun deserialize(decoder: Decoder): LargeStringData {
     *         require(decoder is ChunkedDecoder) { "Only chunked decoder supported" }
     *
     *         val tmpFile = createTempFile()
     *         FileWriter(tmpFile.toFile()).use { writer ->
     *             decoder.decodeStringChunked { chunk ->
     *                 writer.append(chunk)
     *             }
     *         }
     *         return LargeStringData("file://${tmpFile.absolutePathString()}")
     *     }
     * }
     * ```
     *
     * In this sample, we need to handle a huge string coming from JSON. Instead of storing it in memory,
     * we offload it into a temporary file and return a `file://` URI pointing to it.
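     *
     * A minimal sketch of invoking this serializer end-to-end. It assumes the JVM-only
     * `Json.decodeFromStream` entry point (backed by the streaming decoder) and a hypothetical
     * input file name:
     * ```
     * FileInputStream("huge-payload.json").use { stream ->
     *     val value = Json.decodeFromStream<ClassWithLargeStringDataField>(stream)
     *     println(value.largeStringField.largeString) // prints the "file://..." path
     * }
     * ```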
     */
    @ExperimentalSerializationApi
    public fun decodeStringChunked(consumeChunk: (chunk: String) -> Unit)
}
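
/*
 * Illustrative sketch, not part of the library: a toy implementation of the chunking contract
 * described above. It splits an already-available string into chunks of at most 16384 characters.
 * Real implementations, such as the streaming JSON decoder, read chunks directly from the input
 * source instead of materializing the whole string in memory first.
 */
@OptIn(ExperimentalSerializationApi::class)
private class PreloadedStringChunkedDecoder(private val source: String) : ChunkedDecoder {
    override fun decodeStringChunked(consumeChunk: (chunk: String) -> Unit) {
        var position = 0
        while (position < source.length) {
            // Never hand the consumer more than 16384 characters at once
            val end = minOf(position + 16384, source.length)
            consumeChunk(source.substring(position, end))
            position = end
        }
    }
}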