chromium/third_party/libavif/src/src/write.c

// Copyright 2019 Joe Drago. All rights reserved.
// SPDX-License-Identifier: BSD-2-Clause

#include "avif/internal.h"

#include <assert.h>
#include <string.h>
#include <time.h>

#define MAX_ASSOCIATIONS
struct ipmaArray
{};

// Used to store offsets in meta boxes which need to point at mdat offsets that
// aren't known yet. When an item's mdat payload is written, all registered fixups
// will have this now-known offset "fixed up".
avifOffsetFixup;
AVIF_ARRAY_DECLARE(avifOffsetFixupArray, avifOffsetFixup, } ;

static const char alphaURN[] =;
static const size_t alphaURNSize =;

static const char xmpContentType[] =;
static const size_t xmpContentTypeSize =;

static avifResult writeCodecConfig(avifRWStream * s, const avifCodecConfigurationBox * cfg);
static avifResult writeConfigBox(avifRWStream * s, const avifCodecConfigurationBox * cfg, const char * configPropName);

// ---------------------------------------------------------------------------
// avifSetTileConfiguration

static int floorLog2(uint32_t n)
{}

// Splits tilesLog2 into *tileDim1Log2 and *tileDim2Log2, considering the ratio of dim1 to dim2.
//
// Precondition:
//     dim1 >= dim2
// Postcondition:
//     tilesLog2 == *tileDim1Log2 + *tileDim2Log2
//     *tileDim1Log2 >= *tileDim2Log2
static void splitTilesLog2(uint32_t dim1, uint32_t dim2, int tilesLog2, int * tileDim1Log2, int * tileDim2Log2)
{}

// Set the tile configuration: the number of tiles and the tile size.
//
// Tiles improve encoding and decoding speeds when multiple threads are available. However, for
// image coding, the total tile boundary length affects the compression efficiency because intra
// prediction can't go across tile boundaries. So the more tiles there are in an image, the worse
// the compression ratio is. For a given number of tiles, making the tile size close to a square
// tends to reduce the total tile boundary length inside the image. Use more tiles along the longer
// dimension of the image to make the tile size closer to a square.
void avifSetTileConfiguration(int threads, uint32_t width, uint32_t height, int * tileRowsLog2, int * tileColsLog2)
{}

// ---------------------------------------------------------------------------
// avifCodecEncodeOutput

avifCodecEncodeOutput * avifCodecEncodeOutputCreate(void)
{}

avifResult avifCodecEncodeOutputAddSample(avifCodecEncodeOutput * encodeOutput, const uint8_t * data, size_t len, avifBool sync)
{}

void avifCodecEncodeOutputDestroy(avifCodecEncodeOutput * encodeOutput)
{}

// ---------------------------------------------------------------------------
// avifEncoderItem

// one "item" worth for encoder
avifEncoderItem;
AVIF_ARRAY_DECLARE(avifEncoderItemArray, avifEncoderItem, } ;

// ---------------------------------------------------------------------------
// avifEncoderItemReference

// pointer to one "item" interested in
avifEncoderItemReference;
AVIF_ARRAY_DECLARE(avifEncoderItemReferenceArray, avifEncoderItemReference, } ;

// ---------------------------------------------------------------------------
// avifEncoderFrame

avifEncoderFrame;
AVIF_ARRAY_DECLARE(avifEncoderFrameArray, avifEncoderFrame, } ;

// ---------------------------------------------------------------------------
// avifEncoderData

AVIF_ARRAY_DECLARE(avifEncoderItemIdArray, uint16_t, } ;

avifEncoderData;

static void avifEncoderDataDestroy(avifEncoderData * data);

// Returns NULL if a memory allocation failed.
static avifEncoderData * avifEncoderDataCreate(void)
{}

static avifEncoderItem * avifEncoderDataCreateItem(avifEncoderData * data, const char * type, const char * infeName, size_t infeNameSize, uint32_t cellIndex)
{}

static avifEncoderItem * avifEncoderDataFindItemByID(avifEncoderData * data, uint16_t id)
{}

static void avifEncoderDataDestroy(avifEncoderData * data)
{}

static avifResult avifEncoderItemAddMdatFixup(avifEncoderItem * item, const avifRWStream * s)
{}

// ---------------------------------------------------------------------------
// avifItemPropertyDedup - Provides ipco deduplication

avifItemProperty;
AVIF_ARRAY_DECLARE(avifItemPropertyArray, avifItemProperty, } ;

avifItemPropertyDedup;

static avifItemPropertyDedup * avifItemPropertyDedupCreate(void)
{}

static void avifItemPropertyDedupDestroy(avifItemPropertyDedup * dedup)
{}

// Resets the dedup's temporary write stream in preparation for a single item property's worth of writing
static void avifItemPropertyDedupStart(avifItemPropertyDedup * dedup)
{}

// This compares the newly written item property (in the dedup's temporary storage buffer) to
// already-written properties (whose offsets/sizes in outputStream are recorded in the dedup). If a
// match is found, the previous property's index is used. If this new property is unique, it is
// assigned the next available property index, written to the output stream, and its offset/size in
// the output stream is recorded in the dedup for future comparisons.
//
// On success, this function adds to the given ipma box a property association linking the reused
// or newly created property with the item.
static avifResult avifItemPropertyDedupFinish(avifItemPropertyDedup * dedup, avifRWStream * outputStream, struct ipmaArray * ipma, avifBool essential)
{}

// ---------------------------------------------------------------------------

static const avifScalingMode noScaling =;

avifEncoder * avifEncoderCreate(void)
{}

void avifEncoderDestroy(avifEncoder * encoder)
{}

avifResult avifEncoderSetCodecSpecificOption(avifEncoder * encoder, const char * key, const char * value)
{}

static void avifEncoderBackupSettings(avifEncoder * encoder)
{}

// This function detects changes made on avifEncoder. It returns true on success (i.e., if every
// change is valid), or false on failure (i.e., if any setting that can't change was changed). It
// reports a bitwise-OR of detected changes in encoderChanges.
static avifBool avifEncoderDetectChanges(const avifEncoder * encoder, avifEncoderChanges * encoderChanges)
{}

// Same as 'avifEncoderWriteColorProperties' but for the colr nclx box only.
static avifResult avifEncoderWriteNclxProperty(avifRWStream * dedupStream,
                                               avifRWStream * outputStream,
                                               const avifImage * imageMetadata,
                                               struct ipmaArray * ipma,
                                               avifItemPropertyDedup * dedup)
{}

// Subset of avifEncoderWriteColorProperties() for the properties pasp, clap, irot, imir.
static avifResult avifEncoderWriteExtendedColorProperties(avifRWStream * dedupStream,
                                                          avifRWStream * outputStream,
                                                          const avifImage * imageMetadata,
                                                          struct ipmaArray * ipma,
                                                          avifItemPropertyDedup * dedup);

// This function is used in two codepaths:
// * writing color *item* properties
// * writing color *track* properties
//
// Item properties must have property associations with them and can be deduplicated (by reusing
// these associations), so this function leverages the ipma and dedup arguments to do this.
//
// Track properties, however, are implicitly associated by the track in which they are contained, so
// there is no need to build a property association box (ipma), and no way to deduplicate/reuse a
// property. In this case, the ipma and dedup properties should/will be set to NULL, and this
// function will avoid using them.
static avifResult avifEncoderWriteColorProperties(avifRWStream * outputStream,
                                                  const avifImage * imageMetadata,
                                                  struct ipmaArray * ipma,
                                                  avifItemPropertyDedup * dedup)
{}

// Same as 'avifEncoderWriteColorProperties' but for properties related to High Dynamic Range only.
static avifResult avifEncoderWriteHDRProperties(avifRWStream * dedupStream,
                                                avifRWStream * outputStream,
                                                const avifImage * imageMetadata,
                                                struct ipmaArray * ipma,
                                                avifItemPropertyDedup * dedup)
{}

static avifResult avifEncoderWriteExtendedColorProperties(avifRWStream * dedupStream,
                                                          avifRWStream * outputStream,
                                                          const avifImage * imageMetadata,
                                                          struct ipmaArray * ipma,
                                                          avifItemPropertyDedup * dedup)
{}

static avifResult avifRWStreamWriteHandlerBox(avifRWStream * s, const char handlerType[4])
{}

// Write unassociated metadata items (EXIF, XMP) to a small meta box inside of a trak box.
// These items are implicitly associated with the track they are contained within.
static avifResult avifEncoderWriteTrackMetaBox(avifEncoder * encoder, avifRWStream * s)
{}

static avifResult avifWriteGridPayload(avifRWData * data, uint32_t gridCols, uint32_t gridRows, uint32_t gridWidth, uint32_t gridHeight)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP)

static avifBool avifWriteToneMappedImagePayload(avifRWData * data, const avifGainMapMetadata * metadata)
{}

size_t avifEncoderGetGainMapSizeBytes(avifEncoder * encoder)
{}

// Sets altImageMetadata's metadata values to represent the "alternate" image as if applying the gain map to the base image.
static avifResult avifImageCopyAltImageMetadata(avifImage * altImageMetadata, const avifImage * imageWithGainMap)
{}
#endif // AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
static avifResult avifEncoderWriteSampleTransformTokens(avifRWStream * s, const avifSampleTransformExpression * expression)
{
    AVIF_ASSERT_OR_RETURN(expression->count <= 256);
    AVIF_CHECKRES(avifRWStreamWriteU8(s, (uint8_t)expression->count)); // unsigned int(8) token_count;

    for (uint32_t t = 0; t < expression->count; ++t) {
        const avifSampleTransformToken * token = &expression->tokens[t];
        AVIF_CHECKRES(avifRWStreamWriteU8(s, token->type)); // unsigned int(8) token;

        if (token->type == AVIF_SAMPLE_TRANSFORM_CONSTANT) {
            // TODO(yguyon): Verify two's complement representation is guaranteed here.
            const uint32_t constant = *(const uint32_t *)&token->constant;
            AVIF_CHECKRES(avifRWStreamWriteU32(s, constant)); // signed int(1<<(bit_depth+3)) constant;
        } else if (token->type == AVIF_SAMPLE_TRANSFORM_INPUT_IMAGE_ITEM_INDEX) {
            AVIF_CHECKRES(avifRWStreamWriteU8(s, token->inputImageItemIndex)); // unsigned int(8) input_image_item_index;
        }
    }
    return AVIF_RESULT_OK;
}

static avifResult avifEncoderWriteSampleTransformPayload(avifEncoder * encoder, avifRWData * data)
{
    avifRWStream s;
    avifRWStreamStart(&s, data);
    AVIF_CHECKRES(avifRWStreamWriteBits(&s, 0, /*bitCount=*/6)); // unsigned int(6) version = 0;
    // AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32 is necessary because the two input images
    // once combined use 16-bit unsigned values, but intermediate results are stored in signed integers.
    AVIF_CHECKRES(avifRWStreamWriteBits(&s, AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32, /*bitCount=*/2)); // unsigned int(2) bit_depth;

    avifSampleTransformExpression expression = { 0 };
    AVIF_CHECKRES(avifSampleTransformRecipeToExpression(encoder->sampleTransformRecipe, &expression));
    const avifResult result = avifEncoderWriteSampleTransformTokens(&s, &expression);
    avifArrayDestroy(&expression);
    if (result != AVIF_RESULT_OK) {
        avifDiagnosticsPrintf(&encoder->diag, "Failed to write sample transform metadata for recipe %d", (int)encoder->sampleTransformRecipe);
        return result;
    }

    avifRWStreamFinishWrite(&s);
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM

static avifResult avifEncoderDataCreateExifItem(avifEncoderData * data, const avifRWData * exif)
{}

static avifResult avifEncoderDataCreateXMPItem(avifEncoderData * data, const avifRWData * xmp)
{}

// Same as avifImageCopy() but pads the dstImage with border pixel values to reach dstWidth and dstHeight.
static avifResult avifImageCopyAndPad(avifImage * const dstImage, const avifImage * srcImage, uint32_t dstWidth, uint32_t dstHeight)
{}

static int avifQualityToQuantizer(int quality, int minQuantizer, int maxQuantizer)
{}

static const char infeNameColor[] =;
static const char infeNameAlpha[] =;
#if defined(AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP)
static const char infeNameGainMap[] =;
#endif
#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
static const char infeNameSampleTransform[] = "SampleTransform";
#endif

static const char * getInfeName(avifItemCategory itemCategory)
{}

// Adds the items for a single cell or a grid of cells. Outputs the topLevelItemID which is
// the only item if there is exactly one cell, or the grid item for multiple cells.
// Note: The topLevelItemID output argument has the type uint16_t* instead of avifEncoderItem** because
//       the avifEncoderItem pointer may be invalidated by a call to avifEncoderDataCreateItem().
static avifResult avifEncoderAddImageItems(avifEncoder * encoder,
                                           uint32_t gridCols,
                                           uint32_t gridRows,
                                           uint32_t gridWidth,
                                           uint32_t gridHeight,
                                           avifItemCategory itemCategory,
                                           uint16_t * topLevelItemID)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
static avifResult avifEncoderCreateBitDepthExtensionItems(avifEncoder * encoder,
                                                          uint32_t gridCols,
                                                          uint32_t gridRows,
                                                          uint32_t gridWidth,
                                                          uint32_t gridHeight,
                                                          uint16_t colorItemID)
{
    AVIF_ASSERT_OR_RETURN(encoder->sampleTransformRecipe == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_EXTENSION_8B_8B ||
                          encoder->sampleTransformRecipe == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_EXTENSION_12B_4B ||
                          encoder->sampleTransformRecipe == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_EXTENSION_12B_8B_OVERLAP_4B);

    // There are multiple possible ISOBMFF box hierarchies for translucent images,
    // using 'sato' (Sample Transform) derived image items:
    //  - a primary 'sato' item uses a main color coded item and a hidden color coded item; each color coded
    //    item has an auxiliary alpha coded item; the main color coded item and the 'sato' item are in
    //    an 'altr' group (backward-compatible, implemented)
    //  - a primary 'sato' item uses a main color coded item and a hidden color coded item; the primary
    //    'sato' item has an auxiliary alpha 'sato' item using two alpha coded items (backward-incompatible)
    // Likewise, there are multiple possible ISOBMFF box hierarchies for bit-depth-extended grids,
    // using 'sato' (Sample Transform) derived image items:
    //  - a primary color 'grid', an auxiliary alpha 'grid', a hidden color 'grid', a hidden auxiliary alpha 'grid'
    //    and a 'sato' using the two color 'grid's as input items in this order; the primary color item
    //    and the 'sato' item being in an 'altr' group (backward-compatible, implemented)
    //  - a primary 'grid' of 'sato' cells and an auxiliary alpha 'grid' of 'sato' cells (backward-incompatible)
    avifEncoderItem * sampleTransformItem = avifEncoderDataCreateItem(encoder->data,
                                                                      "sato",
                                                                      infeNameSampleTransform,
                                                                      /*infeNameSize=*/strlen(infeNameSampleTransform) + 1,
                                                                      /*cellIndex=*/0);
    AVIF_CHECKRES(avifEncoderWriteSampleTransformPayload(encoder, &sampleTransformItem->metadataPayload));
    sampleTransformItem->itemCategory = AVIF_ITEM_SAMPLE_TRANSFORM;
    uint16_t sampleTransformItemID = sampleTransformItem->id;
    // 'altr' group
    uint16_t * alternativeItemID = (uint16_t *)avifArrayPush(&encoder->data->alternativeItemIDs);
    AVIF_CHECKERR(alternativeItemID != NULL, AVIF_RESULT_OUT_OF_MEMORY);
    *alternativeItemID = sampleTransformItem->id;
    alternativeItemID = (uint16_t *)avifArrayPush(&encoder->data->alternativeItemIDs);
    AVIF_CHECKERR(alternativeItemID != NULL, AVIF_RESULT_OUT_OF_MEMORY);
    *alternativeItemID = colorItemID;

    uint16_t bitDepthExtensionColorItemId;
    AVIF_CHECKRES(
        avifEncoderAddImageItems(encoder, gridCols, gridRows, gridWidth, gridHeight, AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_COLOR, &bitDepthExtensionColorItemId));
    avifEncoderItem * bitDepthExtensionColorItem = avifEncoderDataFindItemByID(encoder->data, bitDepthExtensionColorItemId);
    assert(bitDepthExtensionColorItem);
    bitDepthExtensionColorItem->hiddenImage = AVIF_TRUE;

    // Set the color and bit depth extension items' dimgFromID value to point to the sample transform item.
    // The color item shall be first, and the bit depth extension item second. avifEncoderFinish() writes the
    // dimg item references in item id order, so as long as colorItemID < bitDepthExtensionColorItemId, the order
    // will be correct.
    AVIF_ASSERT_OR_RETURN(colorItemID < bitDepthExtensionColorItemId);
    avifEncoderItem * colorItem = avifEncoderDataFindItemByID(encoder->data, colorItemID);
    AVIF_ASSERT_OR_RETURN(colorItem != NULL);
    AVIF_ASSERT_OR_RETURN(colorItem->dimgFromID == 0); // Our internal API only allows one dimg value per item.
    colorItem->dimgFromID = sampleTransformItemID;
    bitDepthExtensionColorItem->dimgFromID = sampleTransformItemID;

    if (encoder->data->alphaPresent) {
        uint16_t bitDepthExtensionAlphaItemId;
        AVIF_CHECKRES(
            avifEncoderAddImageItems(encoder, gridCols, gridRows, gridWidth, gridHeight, AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_ALPHA, &bitDepthExtensionAlphaItemId));
        avifEncoderItem * bitDepthExtensionAlphaItem = avifEncoderDataFindItemByID(encoder->data, bitDepthExtensionAlphaItemId);
        assert(bitDepthExtensionAlphaItem);
        bitDepthExtensionAlphaItem->irefType = "auxl";
        bitDepthExtensionAlphaItem->irefToID = bitDepthExtensionColorItemId;
        if (encoder->data->imageMetadata->alphaPremultiplied) {
            // The reference may have changed; fetch it again.
            bitDepthExtensionColorItem = avifEncoderDataFindItemByID(encoder->data, bitDepthExtensionColorItemId);
            assert(bitDepthExtensionColorItem);
            bitDepthExtensionColorItem->irefType = "prem";
            bitDepthExtensionColorItem->irefToID = bitDepthExtensionAlphaItemId;
        }
    }
    return AVIF_RESULT_OK;
}

// Same as avifImageApplyExpression() but for the expression (inputImageItem [op] constant).
// Convenience function.
static avifResult avifImageApplyImgOpConst(avifImage * result,
                                           const avifImage * inputImageItem,
                                           avifSampleTransformTokenType op,
                                           int32_t constant,
                                           avifPlanesFlags planes)
{
    // Postfix notation.
    const avifSampleTransformToken tokens[] = { { AVIF_SAMPLE_TRANSFORM_INPUT_IMAGE_ITEM_INDEX, 0, /*inputImageItemIndex=*/1 },
                                                { AVIF_SAMPLE_TRANSFORM_CONSTANT, constant, 0 },
                                                { (uint8_t)op, 0, 0 } };
    return avifImageApplyOperations(result, AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32, /*numTokens=*/3, tokens, /*numInputImageItems=*/1, &inputImageItem, planes);
}

static avifResult avifImageCreateAllocate(avifImage ** sampleTransformedImage, const avifImage * reference, uint32_t numBits, avifPlanesFlag planes)
{
    *sampleTransformedImage = avifImageCreate(reference->width, reference->height, numBits, reference->yuvFormat);
    AVIF_CHECKERR(*sampleTransformedImage != NULL, AVIF_RESULT_OUT_OF_MEMORY);
    return avifImageAllocatePlanes(*sampleTransformedImage, planes);
}

// Finds the encoded base image and decodes it. Callers of this function must free
// *codec and *decodedBaseImage if not null, whether the function succeeds or not.
static avifResult avifEncoderDecodeSatoBaseImage(avifEncoder * encoder,
                                                 const avifImage * original,
                                                 uint32_t numBits,
                                                 avifPlanesFlag planes,
                                                 avifCodec ** codec,
                                                 avifImage ** decodedBaseImage)
{
    avifDecodeSample sample;
    memset(&sample, 0, sizeof(sample));
    sample.spatialID = AVIF_SPATIAL_ID_UNSET;

    // Find the encoded bytes of the base image item.
    for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
        avifEncoderItem * item = &encoder->data->items.item[itemIndex];
        if ((item->itemCategory != AVIF_ITEM_COLOR || planes != AVIF_PLANES_YUV) &&
            (item->itemCategory != AVIF_ITEM_ALPHA || planes != AVIF_PLANES_A)) {
            continue;
        }

        AVIF_ASSERT_OR_RETURN(item->encodeOutput != NULL); // TODO: Support grids?
        AVIF_ASSERT_OR_RETURN(item->encodeOutput->samples.count == 1);
        AVIF_ASSERT_OR_RETURN(item->encodeOutput->samples.sample[0].data.size != 0);
        AVIF_ASSERT_OR_RETURN(sample.data.size == 0); // There should be only one base item.
        sample.data.data = item->encodeOutput->samples.sample[0].data.data;
        sample.data.size = item->encodeOutput->samples.sample[0].data.size;
    }
    AVIF_ASSERT_OR_RETURN(sample.data.size != 0); // There should be at least one base item.

    AVIF_CHECKRES(avifCodecCreate(AVIF_CODEC_CHOICE_AUTO, AVIF_CODEC_FLAG_CAN_DECODE, codec));
    (*codec)->diag = &encoder->diag;
    (*codec)->maxThreads = encoder->maxThreads;
    (*codec)->imageSizeLimit = AVIF_DEFAULT_IMAGE_SIZE_LIMIT;
    AVIF_CHECKRES(avifImageCreateAllocate(decodedBaseImage, original, numBits, planes));
    avifBool isLimitedRangeAlpha = AVIF_FALSE; // Ignored.
    AVIF_CHECKERR((*codec)->getNextImage(*codec, &sample, planes == AVIF_PLANES_A, &isLimitedRangeAlpha, *decodedBaseImage),
                  AVIF_RESULT_ENCODE_SAMPLE_TRANSFORM_FAILED);
    return AVIF_RESULT_OK;
}

static avifResult avifEncoderCreateSatoImage(avifEncoder * encoder,
                                             const avifEncoderItem * item,
                                             avifBool itemWillBeEncodedLosslessly,
                                             const avifImage * image,
                                             avifImage ** sampleTransformedImage)
{
    const avifPlanesFlag planes = avifIsAlpha(item->itemCategory) ? AVIF_PLANES_A : AVIF_PLANES_YUV;
    // The first image item used as input to the 'sato' Sample Transform derived image item.
    avifBool isBase = item->itemCategory == AVIF_ITEM_COLOR || item->itemCategory == AVIF_ITEM_ALPHA;
    if (!isBase) {
        // The second image item used as input to the 'sato' Sample Transform derived image item.
        AVIF_ASSERT_OR_RETURN(item->itemCategory >= AVIF_SAMPLE_TRANSFORM_MIN_CATEGORY &&
                              item->itemCategory <= AVIF_SAMPLE_TRANSFORM_MAX_CATEGORY);
    }

    if (encoder->sampleTransformRecipe == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_EXTENSION_8B_8B) {
        if (isBase) {
            AVIF_CHECKRES(avifImageCreateAllocate(sampleTransformedImage, image, 8, planes));
            AVIF_CHECKRES(avifImageApplyImgOpConst(*sampleTransformedImage, image, AVIF_SAMPLE_TRANSFORM_DIVIDE, 256, planes));
        } else {
            AVIF_CHECKRES(avifImageCreateAllocate(sampleTransformedImage, image, 8, planes));
            AVIF_CHECKRES(avifImageApplyImgOpConst(*sampleTransformedImage, image, AVIF_SAMPLE_TRANSFORM_AND, 255, planes));
        }
    } else if (encoder->sampleTransformRecipe == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_EXTENSION_12B_4B) {
        if (isBase) {
            AVIF_CHECKRES(avifImageCreateAllocate(sampleTransformedImage, image, 12, planes));
            AVIF_CHECKRES(avifImageApplyImgOpConst(*sampleTransformedImage, image, AVIF_SAMPLE_TRANSFORM_DIVIDE, 16, planes));
        } else {
            AVIF_CHECKRES(avifImageCreateAllocate(sampleTransformedImage, image, 8, planes));
            AVIF_CHECKRES(avifImageApplyImgOpConst(*sampleTransformedImage, image, AVIF_SAMPLE_TRANSFORM_AND, 15, planes));
            // AVIF only supports 8, 10 or 12-bit image items. Scale the samples to fit the range.
            // Note: The samples could be encoded as is without being shifted left before encoding,
            //       but they would not be shifted right after decoding either. Right shifting after
            //       decoding provides a guarantee on the range of values and on the lack of integer
            //       overflow, so it is safer to do these extra steps.
            //       It also makes more sense from a compression point-of-view to use the full range.
            // Transform in-place.
            AVIF_CHECKRES(
                avifImageApplyImgOpConst(*sampleTransformedImage, *sampleTransformedImage, AVIF_SAMPLE_TRANSFORM_PRODUCT, 16, planes));
            if (!itemWillBeEncodedLosslessly) {
                // Small loss at encoding could be amplified by the truncation caused by the right
                // shift after decoding. Offset sample values now, before encoding, to round rather
                // than floor the samples shifted after decoding.
                // Note: Samples were just left shifted by numShiftedBits, so adding less than
                //       (1<<numShiftedBits) will not trigger any integer overflow.
                // Transform in-place.
                AVIF_CHECKRES(
                    avifImageApplyImgOpConst(*sampleTransformedImage, *sampleTransformedImage, AVIF_SAMPLE_TRANSFORM_SUM, 7, planes));
            }
        }
    } else {
        AVIF_CHECKERR(encoder->sampleTransformRecipe == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_EXTENSION_12B_8B_OVERLAP_4B,
                      AVIF_RESULT_NOT_IMPLEMENTED);
        if (isBase) {
            AVIF_CHECKRES(avifImageCreateAllocate(sampleTransformedImage, image, 12, planes));
            AVIF_CHECKRES(avifImageApplyImgOpConst(*sampleTransformedImage, image, AVIF_SAMPLE_TRANSFORM_DIVIDE, 16, planes));
        } else {
            AVIF_CHECKRES(avifImageCreateAllocate(sampleTransformedImage, image, 8, planes));
            avifCodec * codec = NULL;
            avifImage * decodedBaseImage = NULL;
            avifResult result = avifEncoderDecodeSatoBaseImage(encoder, image, 12, planes, &codec, &decodedBaseImage);
            if (result == AVIF_RESULT_OK) {
                // decoded = main*16+hidden-128 so hidden = clamp_8b(original-main*16+128). Postfix notation.
                const avifSampleTransformToken tokens[] = { { AVIF_SAMPLE_TRANSFORM_INPUT_IMAGE_ITEM_INDEX, 0, /*inputImageItemIndex=*/1 },
                                                            { AVIF_SAMPLE_TRANSFORM_INPUT_IMAGE_ITEM_INDEX, 0, /*inputImageItemIndex=*/2 },
                                                            { AVIF_SAMPLE_TRANSFORM_CONSTANT, /*constant=*/16, 0 },
                                                            { AVIF_SAMPLE_TRANSFORM_PRODUCT, 0, 0 },
                                                            { AVIF_SAMPLE_TRANSFORM_DIFFERENCE, 0, 0 },
                                                            { AVIF_SAMPLE_TRANSFORM_CONSTANT, /*constant=*/128, 0 },
                                                            { AVIF_SAMPLE_TRANSFORM_SUM, 0, 0 } };
                // image is "original" (index 1) and decodedBaseImage is "main" (index 2) in the formula above.
                const avifImage * inputImageItems[] = { image, decodedBaseImage };
                result = avifImageApplyOperations(*sampleTransformedImage,
                                                  AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32,
                                                  /*numTokens=*/7,
                                                  tokens,
                                                  /*numInputImageItems=*/2,
                                                  inputImageItems,
                                                  planes);
            }
            if (decodedBaseImage) {
                avifImageDestroy(decodedBaseImage);
            }
            if (codec) {
                avifCodecDestroy(codec);
            }
            AVIF_CHECKRES(result);
        }
    }
    return AVIF_RESULT_OK;
}

static avifResult avifEncoderCreateBitDepthExtensionImage(avifEncoder * encoder,
                                                          const avifEncoderItem * item,
                                                          avifBool itemWillBeEncodedLosslessly,
                                                          const avifImage * image,
                                                          avifImage ** sampleTransformedImage)
{
    AVIF_ASSERT_OR_RETURN(image->depth == 16); // Other bit depths could be supported but for now it is 16-bit only.
    *sampleTransformedImage = NULL;
    const avifResult result = avifEncoderCreateSatoImage(encoder, item, itemWillBeEncodedLosslessly, image, sampleTransformedImage);
    if (result != AVIF_RESULT_OK && *sampleTransformedImage != NULL) {
        avifImageDestroy(*sampleTransformedImage);
    }
    return result;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM

static avifCodecType avifEncoderGetCodecType(const avifEncoder * encoder)
{}

// This function is called after every color frame is encoded. It returns AVIF_TRUE if a keyframe needs to be forced for the next
// alpha frame to be encoded, AVIF_FALSE otherwise.
static avifBool avifEncoderDataShouldForceKeyframeForAlpha(const avifEncoderData * data,
                                                           const avifEncoderItem * colorItem,
                                                           avifAddImageFlags addImageFlags)
{}

static avifResult avifGetErrorForItemCategory(avifItemCategory itemCategory)
{}

static uint32_t avifGridWidth(uint32_t gridCols, const avifImage * firstCell, const avifImage * bottomRightCell)
{}

static uint32_t avifGridHeight(uint32_t gridRows, const avifImage * firstCell, const avifImage * bottomRightCell)
{}

static avifResult avifValidateGrid(uint32_t gridCols,
                                   uint32_t gridRows,
                                   const avifImage * const * cellImages,
                                   avifBool validateGainMap,
                                   avifDiagnostics * diag)
{}

static avifResult avifEncoderAddImageInternal(avifEncoder * encoder,
                                              uint32_t gridCols,
                                              uint32_t gridRows,
                                              const avifImage * const * cellImages,
                                              uint64_t durationInTimescales,
                                              avifAddImageFlags addImageFlags)
{}

avifResult avifEncoderAddImage(avifEncoder * encoder, const avifImage * image, uint64_t durationInTimescales, avifAddImageFlags addImageFlags)
{}

avifResult avifEncoderAddImageGrid(avifEncoder * encoder,
                                   uint32_t gridCols,
                                   uint32_t gridRows,
                                   const avifImage * const * cellImages,
                                   avifAddImageFlags addImageFlags)
{}

static size_t avifEncoderFindExistingChunk(avifRWStream * s, size_t mdatStartOffset, const uint8_t * data, size_t size)
{}

static avifResult avifEncoderWriteMediaDataBox(avifEncoder * encoder,
                                               avifRWStream * s,
                                               avifEncoderItemReferenceArray * layeredColorItems,
                                               avifEncoderItemReferenceArray * layeredAlphaItems)
{}

static avifResult avifWriteAltrGroup(avifRWStream * s, uint32_t groupID, const avifEncoderItemIdArray * itemIDs)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
// Returns true if the image can be encoded with a MinimizedImageBox instead of a full regular MetaBox.
static avifBool avifEncoderIsMiniCompatible(const avifEncoder * encoder)
{
    // The MinimizedImageBox ("mif3" brand) only supports non-layered, still images.
    if (encoder->extraLayerCount || (encoder->data->frames.count != 1)) {
        return AVIF_FALSE;
    }

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
    if (encoder->sampleTransformRecipe != AVIF_SAMPLE_TRANSFORM_NONE) {
        return AVIF_FALSE;
    }
#endif

    if (encoder->data->imageMetadata->width > (1 << 15) || encoder->data->imageMetadata->height > (1 << 15)) {
        return AVIF_FALSE;
    }
    if (encoder->data->imageMetadata->icc.size > (1 << 20) || encoder->data->imageMetadata->exif.size > (1 << 20) ||
        encoder->data->imageMetadata->xmp.size > (1 << 20)) {
        return AVIF_FALSE;
    }

    // 4:4:4, 4:2:2, 4:2:0 and 4:0:0 are supported by a MinimizedImageBox.
    if (encoder->data->imageMetadata->yuvFormat != AVIF_PIXEL_FORMAT_YUV444 &&
        encoder->data->imageMetadata->yuvFormat != AVIF_PIXEL_FORMAT_YUV422 &&
        encoder->data->imageMetadata->yuvFormat != AVIF_PIXEL_FORMAT_YUV420 &&
        encoder->data->imageMetadata->yuvFormat != AVIF_PIXEL_FORMAT_YUV400) {
        return AVIF_FALSE;
    }

    if (encoder->data->imageMetadata->colorPrimaries > 255 || encoder->data->imageMetadata->transferCharacteristics > 255 ||
        encoder->data->imageMetadata->matrixCoefficients > 255) {
        return AVIF_FALSE;
    }

    const avifEncoderItem * colorItem = NULL;
    for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
        avifEncoderItem * item = &encoder->data->items.item[itemIndex];

        // Grids are not supported by a MinimizedImageBox.
        if (item->gridCols || item->gridRows) {
            return AVIF_FALSE;
        }

        if (item->id == encoder->data->primaryItemID) {
            assert(!colorItem);
            colorItem = item;
            // main_item_data_size_minus_one so 2^28 inclusive.
            if (item->encodeOutput->samples.count != 1 || item->encodeOutput->samples.sample[0].data.size > (1 << 28)) {
                return AVIF_FALSE;
            }
            continue; // The primary item can be stored in the MinimizedImageBox.
        }
        if (item->itemCategory == AVIF_ITEM_ALPHA && item->irefToID == encoder->data->primaryItemID) {
            // alpha_item_data_size so 2^28 exclusive.
            if (item->encodeOutput->samples.count != 1 || item->encodeOutput->samples.sample[0].data.size >= (1 << 28)) {
                return AVIF_FALSE;
            }
            continue; // The alpha auxiliary item can be stored in the MinimizedImageBox.
        }
        if (!memcmp(item->type, "mime", 4) && !memcmp(item->infeName, "XMP", item->infeNameSize)) {
            assert(item->metadataPayload.size == encoder->data->imageMetadata->xmp.size);
            continue; // XMP metadata can be stored in the MinimizedImageBox.
        }
        if (!memcmp(item->type, "Exif", 4) && !memcmp(item->infeName, "Exif", item->infeNameSize)) {
            assert(item->metadataPayload.size == encoder->data->imageMetadata->exif.size + 4);
            const uint32_t exif_tiff_header_offset = *(uint32_t *)item->metadataPayload.data;
            if (exif_tiff_header_offset != 0) {
                return AVIF_FALSE;
            }
            continue; // Exif metadata can be stored in the MinimizedImageBox if exif_tiff_header_offset is 0.
        }

        // Items besides the colorItem, the alphaItem and Exif/XMP/ICC
        // metadata are not directly supported by the MinimizedImageBox.
        return AVIF_FALSE;
    }
    // A primary item is necessary.
    if (!colorItem) {
        return AVIF_FALSE;
    }
    return AVIF_TRUE;
}

static avifResult avifEncoderWriteMiniBox(avifEncoder * encoder, avifRWStream * s);

static avifResult avifEncoderWriteFileTypeBoxAndMetaBoxV1(avifEncoder * encoder, avifRWData * output)
{
    avifRWStream s;
    avifRWStreamStart(&s, output);

    avifBoxMarker ftyp;
    AVIF_CHECKRES(avifRWStreamWriteBox(&s, "ftyp", AVIF_BOX_SIZE_TBD, &ftyp));
    AVIF_CHECKRES(avifRWStreamWriteChars(&s, "mif3", 4)); // unsigned int(32) major_brand;
    AVIF_CHECKRES(avifRWStreamWriteChars(&s, "avif", 4)); // unsigned int(32) minor_version;
                                                          // unsigned int(32) compatible_brands[];
    avifRWStreamFinishBox(&s, ftyp);

    AVIF_CHECKRES(avifEncoderWriteMiniBox(encoder, &s));

    avifRWStreamFinishWrite(&s);
    return AVIF_RESULT_OK;
}

static avifResult avifEncoderWriteMiniBox(avifEncoder * encoder, avifRWStream * s)
{
    const avifEncoderItem * colorItem = NULL;
    const avifEncoderItem * alphaItem = NULL;
    for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
        avifEncoderItem * item = &encoder->data->items.item[itemIndex];
        if (item->id == encoder->data->primaryItemID) {
            AVIF_ASSERT_OR_RETURN(!colorItem);
            colorItem = item;
        } else if (item->itemCategory == AVIF_ITEM_ALPHA && item->irefToID == encoder->data->primaryItemID) {
            AVIF_ASSERT_OR_RETURN(!alphaItem);
            alphaItem = item;
        }
    }

    AVIF_ASSERT_OR_RETURN(colorItem);
    const avifRWData * colorData = &colorItem->encodeOutput->samples.sample[0].data;
    const avifRWData * alphaData = alphaItem ? &alphaItem->encodeOutput->samples.sample[0].data : NULL;

    const avifImage * const image = encoder->data->imageMetadata;

    const avifBool hasAlpha = alphaItem != NULL;
    const avifBool alphaIsPremultiplied = encoder->data->imageMetadata->alphaPremultiplied;
    const avifBool hasHdr = AVIF_FALSE;     // Not implemented.
    const avifBool hasGainmap = AVIF_FALSE; // Not implemented.
    const avifBool hasIcc = image->icc.size != 0;
    const uint32_t chromaSubsampling = image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400   ? 0
                                       : image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420 ? 1
                                       : image->yuvFormat == AVIF_PIXEL_FORMAT_YUV422 ? 2
                                                                                      : 3;

    const avifColorPrimaries defaultColorPrimaries = hasIcc ? AVIF_COLOR_PRIMARIES_UNSPECIFIED : AVIF_COLOR_PRIMARIES_BT709;
    const avifTransferCharacteristics defaultTransferCharacteristics = hasIcc ? AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED
                                                                              : AVIF_TRANSFER_CHARACTERISTICS_SRGB;
    const avifMatrixCoefficients defaultMatrixCoefficients = chromaSubsampling == 0 ? AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED
                                                                                    : AVIF_MATRIX_COEFFICIENTS_BT601;
    const avifBool hasExplicitCicp = image->colorPrimaries != defaultColorPrimaries ||
                                     image->transferCharacteristics != defaultTransferCharacteristics ||
                                     image->matrixCoefficients != defaultMatrixCoefficients;

    const avifBool floatFlag = AVIF_FALSE;
    const avifBool fullRange = image->yuvRange == AVIF_RANGE_FULL;

    // In AV1, the chroma_sample_position syntax element is not present for the YUV 4:2:2 format.
    // Assume that AV1 uses the same 4:2:2 chroma sample location as HEVC and VVC (colocated).
    if (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV420 && image->yuvChromaSamplePosition != AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN) {
        avifDiagnosticsPrintf(&encoder->diag,
                              "YUV chroma sample position %d is only supported with 4:2:0 YUV format in AV1",
                              image->yuvChromaSamplePosition);
        return AVIF_RESULT_INVALID_ARGUMENT;
    }
    // For the YUV 4:2:0 format, assume centered sample position unless specified otherwise.
    // This is consistent with the behavior in read.c.
    const avifBool chromaIsHorizontallyCentered = image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420 &&
                                                  image->yuvChromaSamplePosition != AVIF_CHROMA_SAMPLE_POSITION_VERTICAL &&
                                                  image->yuvChromaSamplePosition != AVIF_CHROMA_SAMPLE_POSITION_COLOCATED;
    const avifBool chromaIsVerticallyCentered = image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420 &&
                                                image->yuvChromaSamplePosition != AVIF_CHROMA_SAMPLE_POSITION_COLOCATED;

    const uint32_t orientationMinus1 = avifImageIrotImirToExifOrientation(image) - 1;

    const avifBool hasExplicitCodecTypes = AVIF_FALSE; // 'av01' and 'av1C' known from 'avif' minor_version field of FileTypeBox.

    const uint32_t smallDimensionsFlag = image->width <= (1 << 7) && image->height <= (1 << 7);
    const uint32_t codecConfigSize = 4; // 'av1C' always uses 4 bytes.
    const uint32_t fewCodecConfigBytesFlag = codecConfigSize < (1 << 3);
    const uint32_t fewItemDataBytesFlag = colorData->size <= (1 << 15) && (!alphaData || alphaData->size < (1 << 15));
    const uint32_t fewMetadataBytesFlag = image->icc.size <= (1 << 10) && image->exif.size <= (1 << 10) && image->xmp.size <= (1 << 10);

    avifBoxMarker mini;
    AVIF_CHECKRES(avifRWStreamWriteBox(s, "mini", AVIF_BOX_SIZE_TBD, &mini));
    AVIF_CHECKRES(avifRWStreamWriteBits(s, 0, 2)); // bit(2) version = 0;

    // flags
    AVIF_CHECKRES(avifRWStreamWriteBits(s, hasExplicitCodecTypes, 1)); // bit(1) explicit_codec_types_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, floatFlag, 1));             // bit(1) float_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, fullRange, 1));             // bit(1) full_range_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, alphaItem != 0, 1));        // bit(1) alpha_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, hasExplicitCicp, 1));       // bit(1) explicit_cicp_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, hasHdr, 1));                // bit(1) hdr_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, hasIcc, 1));                // bit(1) icc_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, image->exif.size != 0, 1)); // bit(1) exif_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, image->xmp.size != 0, 1));  // bit(1) xmp_flag;

    AVIF_CHECKRES(avifRWStreamWriteBits(s, chromaSubsampling, 2)); // bit(2) chroma_subsampling;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, orientationMinus1, 3)); // bit(3) orientation_minus1;

    // Spatial extents
    AVIF_CHECKRES(avifRWStreamWriteBits(s, smallDimensionsFlag, 1));                         // bit(1) small_dimensions_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, image->width - 1, smallDimensionsFlag ? 7 : 15)); // unsigned int(small_dimensions_flag ? 7 : 15) width_minus1;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, image->height - 1, smallDimensionsFlag ? 7 : 15)); // unsigned int(small_dimensions_flag ? 7 : 15) height_minus1;

    // Pixel information
    if (chromaSubsampling == 1 || chromaSubsampling == 2) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, chromaIsHorizontallyCentered, 1)); // bit(1) chroma_is_horizontally_centered;
    }
    if (chromaSubsampling == 1) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, chromaIsVerticallyCentered, 1)); // bit(1) chroma_is_vertically_centered;
    }

    if (floatFlag) {
        // bit(2) bit_depth_log2_minus4;
        AVIF_ASSERT_OR_RETURN(AVIF_FALSE);
    } else {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, image->depth > 8, 1)); // bit(1) high_bit_depth_flag;
        if (image->depth > 8) {
            AVIF_CHECKRES(avifRWStreamWriteBits(s, image->depth - 9, 3)); // bit(3) bit_depth_minus9;
        }
    }

    if (alphaItem) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, alphaIsPremultiplied, 1)); // bit(1) alpha_is_premultiplied;
    }

    // Colour properties
    if (hasExplicitCicp) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, image->colorPrimaries, 8));          // bit(8) colour_primaries;
        AVIF_CHECKRES(avifRWStreamWriteBits(s, image->transferCharacteristics, 8)); // bit(8) transfer_characteristics;
        if (chromaSubsampling != 0) {
            AVIF_CHECKRES(avifRWStreamWriteBits(s, image->matrixCoefficients, 8)); // bit(8) matrix_coefficients;
        } else {
            AVIF_CHECKERR(image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED, AVIF_RESULT_ENCODE_COLOR_FAILED);
        }
    }

    if (hasExplicitCodecTypes) {
        // bit(32) infe_type;
        // bit(32) codec_config_type;
        AVIF_ASSERT_OR_RETURN(AVIF_FALSE);
    }

    // High Dynamic Range properties
    if (hasHdr) {
        // bit(1) gainmap_flag;
        // if (gainmap_flag) {
        //     unsigned int(small_dimensions_flag ? 7 : 15) gainmap_width_minus1;
        //     unsigned int(small_dimensions_flag ? 7 : 15) gainmap_height_minus1;
        //     bit(8) gainmap_matrix_coefficients;
        //     bit(1) gainmap_full_range_flag;
        //     bit(2) gainmap_chroma_subsampling;
        //     if (gainmap_chroma_subsampling == 1 || gainmap_chroma_subsampling == 2)
        //         bit(1) gainmap_chroma_is_horizontally_centered;
        //     if (gainmap_chroma_subsampling == 1)
        //         bit(1) gainmap_chroma_is_vertically_centered;
        //     bit(1) gainmap_float_flag;
        //     if (gainmap_float_flag)
        //         bit(2) gainmap_bit_depth_log2_minus4;
        //     else {
        //         bit(1) gainmap_high_bit_depth_flag;
        //         if (gainmap_high_bit_depth_flag)
        //             bit(3) gainmap_bit_depth_minus9;
        //     }
        //     bit(1) tmap_icc_flag;
        //     bit(1) tmap_explicit_cicp_flag;
        //     if (tmap_explicit_cicp_flag) {
        //         bit(8) tmap_colour_primaries;
        //         bit(8) tmap_transfer_characteristics;
        //         bit(8) tmap_matrix_coefficients;
        //         bit(1) tmap_full_range_flag;
        //     }
        //     else {
        //         tmap_colour_primaries = 1;
        //         tmap_transfer_characteristics = 13;
        //         tmap_matrix_coefficients = 6;
        //         tmap_full_range_flag = 1;
        //     }
        // }
        // bit(1) clli_flag;
        // bit(1) mdcv_flag;
        // bit(1) cclv_flag;
        // bit(1) amve_flag;
        // bit(1) reve_flag;
        // bit(1) ndwt_flag;
        // if (clli_flag)
        //     ContentLightLevel clli;
        // if (mdcv_flag)
        //     MasteringDisplayColourVolume mdcv;
        // if (cclv_flag)
        //     ContentColourVolume cclv;
        // if (amve_flag)
        //     AmbientViewingEnvironment amve;
        // if (reve_flag)
        //     ReferenceViewingEnvironment reve;
        // if (ndwt_flag)
        //     NominalDiffuseWhite ndwt;
        // if (gainmap_flag) {
        //     bit(1) tmap_clli_flag;
        //     bit(1) tmap_mdcv_flag;
        //     bit(1) tmap_cclv_flag;
        //     bit(1) tmap_amve_flag;
        //     bit(1) tmap_reve_flag;
        //     bit(1) tmap_ndwt_flag;
        //     if (tmap_clli_flag)
        //         ContentLightLevel tmap_clli;
        //     if (tmap_mdcv_flag)
        //         MasteringDisplayColourVolume tmap_mdcv;
        //     if (tmap_cclv_flag)
        //         ContentColourVolume tmap_cclv;
        //     if (tmap_amve_flag)
        //         AmbientViewingEnvironment tmap_amve;
        //     if (tmap_reve_flag)
        //         ReferenceViewingEnvironment tmap_reve;
        //     if (tmap_ndwt_flag)
        //         NominalDiffuseWhite tmap_ndwt;
        // }
        return AVIF_RESULT_NOT_IMPLEMENTED;
    }

    // Chunk sizes
    if (hasIcc || image->exif.size || image->xmp.size || (hasHdr && hasGainmap)) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, fewMetadataBytesFlag, 1)); // bit(1) few_metadata_bytes_flag;
    }
    AVIF_CHECKRES(avifRWStreamWriteBits(s, fewCodecConfigBytesFlag, 1)); // bit(1) few_codec_config_bytes_flag;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, fewItemDataBytesFlag, 1));    // bit(1) few_item_data_bytes_flag;

    if (hasIcc) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, (uint32_t)image->icc.size - 1, fewMetadataBytesFlag ? 10 : 20)); // unsigned int(few_metadata_bytes_flag ? 10 : 20) icc_data_size_minus1;
    }
    // if (hdr_flag && gainmap_flag && tmap_icc_flag)
    //     unsigned int(few_metadata_bytes_flag ? 10 : 20) tmap_icc_data_size_minus1;

    // if (hdr_flag && gainmap_flag)
    //     unsigned int(few_metadata_bytes_flag ? 10 : 20) gainmap_metadata_size;
    // if (hdr_flag && gainmap_flag)
    //     unsigned int(few_item_data_bytes_flag ? 15 : 28) gainmap_item_data_size;
    // if (hdr_flag && gainmap_flag && gainmap_item_data_size > 0)
    //     unsigned int(few_codec_config_bytes_flag ? 3 : 12) gainmap_item_codec_config_size;

    AVIF_CHECKRES(avifRWStreamWriteBits(s, codecConfigSize, fewCodecConfigBytesFlag ? 3 : 12)); // unsigned int(few_codec_config_bytes_flag ? 3 : 12) main_item_codec_config_size;
    AVIF_CHECKRES(avifRWStreamWriteBits(s, (uint32_t)colorData->size - 1, fewItemDataBytesFlag ? 15 : 28)); // unsigned int(few_item_data_bytes_flag ? 15 : 28) main_item_data_size_minus1;

    if (hasAlpha) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, (uint32_t)alphaData->size, fewItemDataBytesFlag ? 15 : 28)); // unsigned int(few_item_data_bytes_flag ? 15 : 28) alpha_item_data_size;
    }
    if (hasAlpha && alphaData->size != 0) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, codecConfigSize, fewCodecConfigBytesFlag ? 3 : 12)); // unsigned int(few_codec_config_bytes_flag ? 3 : 12) alpha_item_codec_config_size;
    }

    if (image->exif.size) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, (uint32_t)image->exif.size - 1, fewMetadataBytesFlag ? 10 : 20)); // unsigned int(few_metadata_bytes_flag ? 10 : 20) exif_data_size_minus_one;
    }
    if (image->xmp.size) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, (uint32_t)image->xmp.size - 1, fewMetadataBytesFlag ? 10 : 20)); // unsigned int(few_metadata_bytes_flag ? 10 : 20) xmp_data_size_minus_one;
    }

    // trailing_bits(); // bit padding till byte alignment
    if (s->numUsedBitsInPartialByte != 0) {
        AVIF_CHECKRES(avifRWStreamWriteBits(s, 0, 8 - s->numUsedBitsInPartialByte));
    }
    const size_t headerSize = avifRWStreamOffset(s);

    // Chunks
    if (hasAlpha && alphaData->size != 0 && codecConfigSize != 0) {
        AVIF_CHECKRES(writeCodecConfig(s, &alphaItem->av1C)); // unsigned int(8) alpha_item_codec_config[alpha_item_codec_config_size];
    }
    // if (hdr_flag && gainmap_flag && gainmap_item_codec_config_size > 0)
    //     unsigned int(8) gainmap_item_codec_config[gainmap_item_codec_config_size];
    if (codecConfigSize > 0) {
        AVIF_CHECKRES(writeCodecConfig(s, &colorItem->av1C)); // unsigned int(8) main_item_codec_config[main_item_codec_config_size];
    }

    if (hasIcc) {
        AVIF_CHECKRES(avifRWStreamWrite(s, image->icc.data, image->icc.size)); // unsigned int(8) icc_data[icc_data_size_minus1 + 1];
    }
    // if (hdr_flag && gainmap_flag && tmap_icc_flag)
    //     unsigned int(8) tmap_icc_data[tmap_icc_data_size_minus1 + 1];
    // if (hdr_flag && gainmap_flag && gainmap_metadata_size > 0)
    //     unsigned int(8) gainmap_metadata[gainmap_metadata_size];

    if (hasAlpha && alphaData->size != 0) {
        AVIF_CHECKRES(avifRWStreamWrite(s, alphaData->data, alphaData->size)); // unsigned int(8) alpha_item_data[alpha_item_data_size];
    }
    // if (hdr_flag && gainmap_flag && gainmap_item_data_size > 0)
    //     unsigned int(8) gainmap_item_data[gainmap_item_data_size];

    AVIF_CHECKRES(avifRWStreamWrite(s, colorData->data, colorData->size)); // unsigned int(8) main_item_data[main_item_data_size_minus1 + 1];

    if (image->exif.size) {
        AVIF_CHECKRES(avifRWStreamWrite(s, image->exif.data, image->exif.size)); // unsigned int(8) exif_data[exif_data_size_minus1 + 1];
    }
    if (image->xmp.size) {
        AVIF_CHECKRES(avifRWStreamWrite(s, image->xmp.data, image->xmp.size)); // unsigned int(8) xmp_data[xmp_data_size_minus1 + 1];
    }

    AVIF_ASSERT_OR_RETURN(avifRWStreamOffset(s) - headerSize == (hasAlpha ? codecConfigSize : 0) + codecConfigSize +
                                                                    image->icc.size + (hasAlpha ? alphaData->size : 0) +
                                                                    colorData->size + image->exif.size + image->xmp.size);
    avifRWStreamFinishBox(s, mini);
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_MINI

static avifResult avifRWStreamWriteProperties(avifItemPropertyDedup * const dedup,
                                              avifRWStream * const s,
                                              const avifEncoder * const encoder,
                                              const avifImage * const imageMetadata,
                                              const avifImage * const altImageMetadata)
{}

avifResult avifEncoderFinish(avifEncoder * encoder, avifRWData * output)
{}

avifResult avifEncoderWrite(avifEncoder * encoder, const avifImage * image, avifRWData * output)
{}

// Implementation of section 2.3.3 of AV1 Codec ISO Media File Format Binding specification v1.2.0.
// See https://aomediacodec.github.io/av1-isobmff/v1.2.0.html#av1codecconfigurationbox-syntax.
static avifResult writeCodecConfig(avifRWStream * s, const avifCodecConfigurationBox * cfg)
{}

static avifResult writeConfigBox(avifRWStream * s, const avifCodecConfigurationBox * cfg, const char * configPropName)
{}