chromium/third_party/libavif/src/src/read.c

// Copyright 2019 Joe Drago. All rights reserved.
// SPDX-License-Identifier: BSD-2-Clause

#include "avif/internal.h"

#include <assert.h>
#include <ctype.h>
#include <inttypes.h>
#include <limits.h>
#include <math.h>
#include <stdio.h>
#include <string.h>

#define AUXTYPE_SIZE 64
#define CONTENTTYPE_SIZE 64

// class VisualSampleEntry(codingname) extends SampleEntry(codingname) {
//     unsigned int(16) pre_defined = 0;
//     const unsigned int(16) reserved = 0;
//     unsigned int(32)[3] pre_defined = 0;
//     unsigned int(16) width;
//     unsigned int(16) height;
//     template unsigned int(32) horizresolution = 0x00480000; // 72 dpi
//     template unsigned int(32) vertresolution = 0x00480000;  // 72 dpi
//     const unsigned int(32) reserved = 0;
//     template unsigned int(16) frame_count = 1;
//     string[32] compressorname;
//     template unsigned int(16) depth = 0x0018;
//     int(16) pre_defined = -1;
//     // other boxes from derived specifications
//     CleanApertureBox clap;    // optional
//     PixelAspectRatioBox pasp; // optional
// }
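// Size derived from the layout above: the VisualSampleEntry fields add up to
// 2+2+12+2+2+4+4+4+2+32+2+2 = 70 bytes, plus the 8-byte SampleEntry header
// (six reserved bytes and a 16-bit data_reference_index), for 78 bytes total.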
static const size_t VISUALSAMPLEENTRY_SIZE = 78;

// The only supported ipma box values for both version and flags are [0,1], so there technically
// can't be more than 2 * 2 = 4 unique (version, flags) tuples right now.
#define MAX_IPMA_VERSION_AND_FLAGS_SEEN 4

// ---------------------------------------------------------------------------
// AVIF codec type (AV1 or AV2)

static avifCodecType avifGetCodecType(const uint8_t * fourcc)
{}

static const char * avifGetConfigurationPropertyName(avifCodecType codecType)
{}

// ---------------------------------------------------------------------------
// Box data structures

avifBrand;
AVIF_ARRAY_DECLARE(avifBrandArray, avifBrand, brand);

// ftyp
avifFileType;

// ispe
avifImageSpatialExtents;

// auxC
avifAuxiliaryType;

// infe mime content_type
avifContentType;

// colr
avifColourInformationBox;

#define MAX_PIXI_PLANE_DEPTHS 4
avifPixelInformationProperty;

avifOperatingPointSelectorProperty;

avifLayerSelectorProperty;

avifAV1LayeredImageIndexingProperty;

// ---------------------------------------------------------------------------
// Top-level structures

struct avifMeta;

// Temporary storage for ipco/stsd contents until they can be associated and memcpy'd to an avifDecoderItem
avifProperty;
AVIF_ARRAY_DECLARE(avifPropertyArray, avifProperty, prop);

// Finds the first property of a given type.
static const avifProperty * avifPropertyArrayFind(const avifPropertyArray * properties, const char * type)
{}

AVIF_ARRAY_DECLARE(avifExtentArray, avifExtent, extent);

// one "item" worth for decoding (all iref, iloc, iprp, etc refer to one of these)
avifDecoderItem;
AVIF_ARRAY_DECLARE(avifDecoderItemArray, avifDecoderItem *, item);

// grid storage
avifImageGrid;

// ---------------------------------------------------------------------------
// avifTrack

avifSampleTableChunk;
AVIF_ARRAY_DECLARE(avifSampleTableChunkArray, avifSampleTableChunk, chunk);

avifSampleTableSampleToChunk;
AVIF_ARRAY_DECLARE(avifSampleTableSampleToChunkArray, avifSampleTableSampleToChunk, sampleToChunk);

avifSampleTableSampleSize;
AVIF_ARRAY_DECLARE(avifSampleTableSampleSizeArray, avifSampleTableSampleSize, sampleSize);

avifSampleTableTimeToSample;
AVIF_ARRAY_DECLARE(avifSampleTableTimeToSampleArray, avifSampleTableTimeToSample, timeToSample);

avifSyncSample;
AVIF_ARRAY_DECLARE(avifSyncSampleArray, avifSyncSample, syncSample);

avifSampleDescription;
AVIF_ARRAY_DECLARE(avifSampleDescriptionArray, avifSampleDescription, description);

avifSampleTable;

static void avifSampleTableDestroy(avifSampleTable * sampleTable);

static avifSampleTable * avifSampleTableCreate(void)
{}

static void avifSampleTableDestroy(avifSampleTable * sampleTable)
{}

static uint32_t avifSampleTableGetImageDelta(const avifSampleTable * sampleTable, uint32_t imageIndex)
{}

static avifCodecType avifSampleTableGetCodecType(const avifSampleTable * sampleTable)
{}

static uint32_t avifCodecConfigurationBoxGetDepth(const avifCodecConfigurationBox * av1C)
{}

// This is used as a hint when validating the clap box in avifDecoderItemValidateProperties.
static avifPixelFormat avifCodecConfigurationBoxGetFormat(const avifCodecConfigurationBox * av1C)
{}

static const avifPropertyArray * avifSampleTableGetProperties(const avifSampleTable * sampleTable, avifCodecType codecType)
{}

// one video track ("trak" contents)
avifTrack;
AVIF_ARRAY_DECLARE(avifTrackArray, avifTrack, track);

// ---------------------------------------------------------------------------
// avifCodecDecodeInput

avifCodecDecodeInput * avifCodecDecodeInputCreate(void)
{}

void avifCodecDecodeInputDestroy(avifCodecDecodeInput * decodeInput)
{}

// Returns how many samples are in the chunk.
static uint32_t avifGetSampleCountOfChunk(const avifSampleTableSampleToChunkArray * sampleToChunks, uint32_t chunkIndex)
{}

static avifResult avifCodecDecodeInputFillFromSampleTable(avifCodecDecodeInput * decodeInput,
                                                          avifSampleTable * sampleTable,
                                                          const uint32_t imageCountLimit,
                                                          const uint64_t sizeHint,
                                                          avifDiagnostics * diag)
{}

static avifResult avifCodecDecodeInputFillFromDecoderItem(avifCodecDecodeInput * decodeInput,
                                                          avifDecoderItem * item,
                                                          avifBool allowProgressive,
                                                          const uint32_t imageCountLimit,
                                                          const uint64_t sizeHint,
                                                          avifDiagnostics * diag)
{}

// ---------------------------------------------------------------------------
// Helper macros / functions

#define BEGIN_STREAM(VARNAME, PTR, SIZE, DIAG, CONTEXT)
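// (Definition elided. Judging from its uses below, this macro presumably declares an avifROStream
// named VARNAME over the SIZE bytes starting at PTR and records DIAG and the CONTEXT string for
// error reporting, e.g. BEGIN_STREAM(s, raw, rawLen, diag, "Box[sato]");)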

// Use this to keep track of whether or not a child box that must be unique (0 or 1 present) has
// been seen yet while parsing a parent box. If the "seen" bit is already set for a given box when
// it is encountered during parsing, an error is reported. Which bit corresponds to which box is
// dictated entirely by the calling function.
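// Illustrative caller pattern (hypothetical box names, bit assignments and variable names):
//     uint32_t uniqueBoxFlags = 0;
//     ...
//     if (!memcmp(header.type, "pitm", 4)) {
//         AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, 0, "meta", "pitm", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
//         // ... parse pitm ...
//     } else if (!memcmp(header.type, "iloc", 4)) {
//         AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, 1, "meta", "iloc", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
//         // ... parse iloc ...
//     }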
static avifBool uniqueBoxSeen(uint32_t * uniqueBoxFlags, uint32_t whichFlag, const char * parentBoxType, const char * boxType, avifDiagnostics * diagnostics)
{}

// ---------------------------------------------------------------------------
// avifDecoderData

avifTile;
AVIF_ARRAY_DECLARE(avifTileArray, avifTile, tile);

// This holds one "meta" box (from the BMFF and HEIF standards) worth of relevant-to-AVIF information.
// * If a meta box is parsed from the root level of the BMFF, it can contain the information about
//   "items" which might be color planes, alpha planes, or EXIF or XMP metadata.
// * If a meta box is parsed from inside of a track ("trak") box, any metadata (EXIF/XMP) items inside
//   of that box are implicitly associated with that track.
avifMeta;

static void avifMetaDestroy(avifMeta * meta);

static avifMeta * avifMetaCreate(void)
{}

static void avifMetaDestroy(avifMeta * meta)
{}

static avifResult avifCheckItemID(const char * boxFourcc, uint32_t itemID, avifDiagnostics * diag)
{}

static avifResult avifMetaFindOrCreateItem(avifMeta * meta, uint32_t itemID, avifDecoderItem ** item)
{}

// A group of AVIF tiles in an image item, such as a single tile or a grid of multiple tiles.
avifTileInfo;

avifDecoderData;

static void avifDecoderDataDestroy(avifDecoderData * data);

static avifDecoderData * avifDecoderDataCreate(void)
{}

static void avifDecoderDataResetCodec(avifDecoderData * data)
{}

static avifTile * avifDecoderDataCreateTile(avifDecoderData * data, avifCodecType codecType, uint32_t width, uint32_t height, uint8_t operatingPoint)
{}

static avifTrack * avifDecoderDataCreateTrack(avifDecoderData * data)
{}

static void avifDecoderDataClearTiles(avifDecoderData * data)
{}

static void avifDecoderDataDestroy(avifDecoderData * data)
{}

// This returns the max extent that has to be read in order to decode this item. If
// the item is stored in an idat, the data has already been read during Parse() and
// this function will return AVIF_RESULT_OK with a 0-byte extent.
static avifResult avifDecoderItemMaxExtent(const avifDecoderItem * item, const avifDecodeSample * sample, avifExtent * outExtent)
{}

static uint8_t avifDecoderItemOperatingPoint(const avifDecoderItem * item)
{}

static avifResult avifDecoderItemValidateProperties(const avifDecoderItem * item,
                                                    const char * configPropName,
                                                    avifDiagnostics * diag,
                                                    const avifStrictFlags strictFlags)
{}

static avifResult avifDecoderItemRead(avifDecoderItem * item,
                                      avifIO * io,
                                      avifROData * outData,
                                      size_t offset,
                                      size_t partialByteCount,
                                      avifDiagnostics * diag)
{}

// Returns the avifCodecType of the first tile of the gridItem.
static avifCodecType avifDecoderItemGetGridCodecType(const avifDecoderItem * gridItem)
{}

// Fills the dimgIdxToItemIdx array with a mapping from each 0-based tile index in the 'dimg' reference
// to its corresponding 0-based index in the avifMeta::items array.
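// For example (hypothetical IDs): if the 'dimg' reference lists item IDs {10, 12, 11} and those
// items sit at indices 4, 6 and 5 of avifMeta::items, then dimgIdxToItemIdx is filled with {4, 6, 5}.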
static avifResult avifFillDimgIdxToItemIdxArray(uint32_t * dimgIdxToItemIdx, uint32_t numExpectedTiles, const avifDecoderItem * gridItem)
{}

// Creates the tiles and associates them with the items in the order of the 'dimg' association.
static avifResult avifDecoderGenerateImageGridTiles(avifDecoder * decoder,
                                                    avifDecoderItem * gridItem,
                                                    avifItemCategory itemCategory,
                                                    const uint32_t * dimgIdxToItemIdx,
                                                    uint32_t numTiles)
{}

// Allocates the dstImage. Also verifies some spec compliance rules for grids, if relevant.
static avifResult avifDecoderDataAllocateImagePlanes(avifDecoderData * data, const avifTileInfo * info, avifImage * dstImage)
{}

// Copies over the pixels from the tile into dstImage.
// Verifies that the relevant properties of the tile match those of the first tile in case of a grid.
static avifResult avifDecoderDataCopyTileToImage(avifDecoderData * data,
                                                 const avifTileInfo * info,
                                                 avifImage * dstImage,
                                                 const avifTile * tile,
                                                 unsigned int tileIndex)
{}

// If colorId == 0 (a sentinel value as item IDs must be nonzero), accept any found EXIF/XMP metadata. Passing in 0
// is used when finding metadata in a meta box embedded in a trak box, as any items inside of a meta box that is
// inside of a trak box are implicitly associated to the track.
static avifResult avifDecoderFindMetadata(avifDecoder * decoder, avifMeta * meta, avifImage * image, uint32_t colorId)
{}

// ---------------------------------------------------------------------------
// URN

static avifBool isAlphaURN(const char * urn)
{}

// ---------------------------------------------------------------------------
// BMFF Parsing

static avifBool avifParseHandlerBox(const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseItemLocationBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseImageGridBox(avifImageGrid * grid,
                                      const uint8_t * raw,
                                      size_t rawLen,
                                      uint32_t imageSizeLimit,
                                      uint32_t imageDimensionLimit,
                                      avifDiagnostics * diag)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP)

static avifBool avifParseGainMapMetadata(avifGainMapMetadata * metadata, avifROStream * s)
{}

// If the gain map's version or minimum_version tag is not supported, returns AVIF_RESULT_NOT_IMPLEMENTED.
static avifResult avifParseToneMappedImageBox(avifGainMapMetadata * metadata, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}
#endif // AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
// bit_depth is assumed to be 2, i.e. 1 << (bit_depth + 3) = 32-bit constants.
static avifResult avifParseSampleTransformTokens(avifROStream * s, avifSampleTransformExpression * expression)
{
    uint8_t tokenCount;
    AVIF_CHECKERR(avifROStreamRead(s, &tokenCount, /*size=*/1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) token_count;
    AVIF_CHECKERR(tokenCount != 0, AVIF_RESULT_BMFF_PARSE_FAILED);
    AVIF_CHECKERR(avifArrayCreate(expression, sizeof(expression->tokens[0]), tokenCount), AVIF_RESULT_OUT_OF_MEMORY);

    for (uint32_t t = 0; t < tokenCount; ++t) {
        avifSampleTransformToken * token = (avifSampleTransformToken *)avifArrayPush(expression);
        AVIF_CHECKERR(token != NULL, AVIF_RESULT_OUT_OF_MEMORY);

        AVIF_CHECKERR(avifROStreamRead(s, &token->type, /*size=*/1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) token;
        if (token->type == AVIF_SAMPLE_TRANSFORM_CONSTANT) {
            // Two's complement representation is assumed here.
            uint32_t constant;
            AVIF_CHECKERR(avifROStreamReadU32(s, &constant), AVIF_RESULT_BMFF_PARSE_FAILED); // signed int(1<<(bit_depth+3)) constant;
            // Reinterpret the bits rather than casting: converting an out-of-range uint32_t to int32_t is implementation-defined.
            token->constant = *(int32_t *)&constant;
        } else if (token->type == AVIF_SAMPLE_TRANSFORM_INPUT_IMAGE_ITEM_INDEX) {
            AVIF_CHECKERR(avifROStreamRead(s, &token->inputImageItemIndex, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) input_image_item_index;
        }
    }
    AVIF_CHECKERR(avifROStreamRemainingBytes(s) == 0, AVIF_RESULT_BMFF_PARSE_FAILED);
    return AVIF_RESULT_OK;
}

// Parses the raw bitstream of the 'sato' Sample Transform derived image item and extracts the expression.
static avifResult avifParseSampleTransformImageBox(const uint8_t * raw,
                                                   size_t rawLen,
                                                   uint32_t numInputImageItems,
                                                   avifSampleTransformExpression * expression,
                                                   avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[sato]");

    uint8_t version, bitDepth;
    AVIF_CHECKERR(avifROStreamReadBits8(&s, &version, /*bitCount=*/6), AVIF_RESULT_BMFF_PARSE_FAILED);  // unsigned int(6) version = 0;
    AVIF_CHECKERR(avifROStreamReadBits8(&s, &bitDepth, /*bitCount=*/2), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(2) bit_depth;
    AVIF_CHECKERR(version == 0, AVIF_RESULT_NOT_IMPLEMENTED);
    AVIF_CHECKERR(bitDepth == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32, AVIF_RESULT_NOT_IMPLEMENTED);

    const avifResult result = avifParseSampleTransformTokens(&s, expression);
    if (result != AVIF_RESULT_OK) {
        avifArrayDestroy(expression);
        return result;
    }
    if (!avifSampleTransformExpressionIsValid(expression, numInputImageItems)) {
        avifArrayDestroy(expression);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    return AVIF_RESULT_OK;
}

static avifResult avifDecoderSampleTransformItemValidateProperties(const avifDecoderItem * item, avifDiagnostics * diag)
{
    const avifProperty * pixiProp = avifPropertyArrayFind(&item->properties, "pixi");
    if (!pixiProp) {
        avifDiagnosticsPrintf(diag, "Item ID %u of type '%.4s' is missing mandatory pixi property", item->id, (const char *)item->type);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    for (uint8_t i = 0; i < pixiProp->u.pixi.planeCount; ++i) {
        if (pixiProp->u.pixi.planeDepths[i] != pixiProp->u.pixi.planeDepths[0]) {
            avifDiagnosticsPrintf(diag,
                                  "Item ID %u of type '%.4s' has different depths specified by pixi property [%u, %u], this is not supported",
                                  item->id,
                                  (const char *)item->type,
                                  pixiProp->u.pixi.planeDepths[0],
                                  pixiProp->u.pixi.planeDepths[i]);
            return AVIF_RESULT_NOT_IMPLEMENTED;
        }
    }

    const avifProperty * ispeProp = avifPropertyArrayFind(&item->properties, "ispe");
    if (!ispeProp) {
        avifDiagnosticsPrintf(diag, "Item ID %u of type '%.4s' is missing mandatory ispe property", item->id, (const char *)item->type);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    for (uint32_t i = 0; i < item->meta->items.count; ++i) {
        avifDecoderItem * inputImageItem = item->meta->items.item[i];
        if (inputImageItem->dimgForID != item->id) {
            continue;
        }
        // Even if inputImageItem is a grid, the ispe property from its first tile should have been copied to the grid item.
        const avifProperty * inputImageItemIspeProp = avifPropertyArrayFind(&inputImageItem->properties, "ispe");
        AVIF_ASSERT_OR_RETURN(inputImageItemIspeProp != NULL);
        if (inputImageItemIspeProp->u.ispe.width != ispeProp->u.ispe.width ||
            inputImageItemIspeProp->u.ispe.height != ispeProp->u.ispe.height) {
            avifDiagnosticsPrintf(diag,
                                  "The fields of the ispe property of item ID %u of type '%.4s' differs from item ID %u",
                                  inputImageItem->id,
                                  (const char *)inputImageItem->type,
                                  item->id);
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        // TODO(yguyon): Check that all input image items share the same codec config (except for the bit depth value).
    }

    AVIF_CHECKERR(avifPropertyArrayFind(&item->properties, "clap") == NULL, AVIF_RESULT_NOT_IMPLEMENTED);
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM

// Extracts the codecType from the item type or from its children.
// Also parses and outputs grid information if the item is a grid.
// isItemInInput must be false if the item is a made-up structure
// (and thus not part of the parseable input bitstream).
static avifResult avifDecoderItemReadAndParse(const avifDecoder * decoder,
                                              avifDecoderItem * item,
                                              avifBool isItemInInput,
                                              avifImageGrid * grid,
                                              avifCodecType * codecType)
{}

static avifBool avifParseImageSpatialExtentsProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseAuxiliaryTypeProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseColourInformationBox(avifProperty * prop, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseContentLightLevelInformationBox(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

// Implementation of section 2.3.3 of AV1 Codec ISO Media File Format Binding specification v1.2.0.
// See https://aomediacodec.github.io/av1-isobmff/v1.2.0.html#av1codecconfigurationbox-syntax.
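// For reference, the AV1CodecConfigurationRecord syntax (summarized; the linked document is authoritative):
//     unsigned int(1) marker = 1;
//     unsigned int(7) version = 1;
//     unsigned int(3) seq_profile;
//     unsigned int(5) seq_level_idx_0;
//     unsigned int(1) seq_tier_0;
//     unsigned int(1) high_bitdepth;
//     unsigned int(1) twelve_bit;
//     unsigned int(1) monochrome;
//     unsigned int(1) chroma_subsampling_x;
//     unsigned int(1) chroma_subsampling_y;
//     unsigned int(2) chroma_sample_position;
//     unsigned int(3) reserved = 0;
//     unsigned int(1) initial_presentation_delay_present;
//     if (initial_presentation_delay_present) {
//         unsigned int(4) initial_presentation_delay_minus_one;
//     } else {
//         unsigned int(4) reserved = 0;
//     }
//     unsigned int(8) configOBUs[];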
static avifBool avifParseCodecConfiguration(avifROStream * s, avifCodecConfigurationBox * config, const char * configPropName, avifDiagnostics * diag)
{}

static avifBool avifParseCodecConfigurationBoxProperty(avifProperty * prop,
                                                       const uint8_t * raw,
                                                       size_t rawLen,
                                                       const char * configPropName,
                                                       avifDiagnostics * diag)
{}

static avifBool avifParsePixelAspectRatioBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseCleanApertureBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseImageRotationProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseImageMirrorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParsePixelInformationProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseOperatingPointSelectorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseLayerSelectorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseAV1LayeredImageIndexingProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseItemPropertyContainerBox(avifPropertyArray * properties,
                                                    uint64_t rawOffset,
                                                    const uint8_t * raw,
                                                    size_t rawLen,
                                                    avifDiagnostics * diag)
{}

static avifResult avifParseItemPropertyAssociation(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag, uint32_t * outVersionAndFlags)
{}

static avifBool avifParsePrimaryItemBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseItemDataBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseItemPropertiesBox(avifMeta * meta, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseItemInfoEntry(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseItemInfoBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseItemReferenceBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseMetaBox(avifMeta * meta, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseTrackHeaderBox(avifTrack * track,
                                        const uint8_t * raw,
                                        size_t rawLen,
                                        uint32_t imageSizeLimit,
                                        uint32_t imageDimensionLimit,
                                        avifDiagnostics * diag)
{}

static avifBool avifParseMediaHeaderBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseChunkOffsetBox(avifSampleTable * sampleTable, avifBool largeOffsets, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseSampleToChunkBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseSampleSizeBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseSyncSampleBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseTimeToSampleBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseSampleDescriptionBox(avifSampleTable * sampleTable,
                                                uint64_t rawOffset,
                                                const uint8_t * raw,
                                                size_t rawLen,
                                                avifDiagnostics * diag)
{}

static avifResult avifParseSampleTableBox(avifTrack * track, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseMediaInformationBox(avifTrack * track, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseMediaBox(avifTrack * track, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifTrackReferenceBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseEditListBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifParseEditBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifResult avifParseTrackBox(avifDecoderData * data,
                                    uint64_t rawOffset,
                                    const uint8_t * raw,
                                    size_t rawLen,
                                    uint32_t imageSizeLimit,
                                    uint32_t imageDimensionLimit)
{}

static avifResult avifParseMovieBox(avifDecoderData * data,
                                    uint64_t rawOffset,
                                    const uint8_t * raw,
                                    size_t rawLen,
                                    uint32_t imageSizeLimit,
                                    uint32_t imageDimensionLimit)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
static avifProperty * avifMetaCreateProperty(avifMeta * meta, const char * propertyType)
{
    avifProperty * metaProperty = avifArrayPush(&meta->properties);
    AVIF_CHECK(metaProperty);
    memcpy(metaProperty->type, propertyType, 4);
    return metaProperty;
}

static avifProperty * avifDecoderItemAddProperty(avifDecoderItem * item, const avifProperty * metaProperty)
{
    avifProperty * itemProperty = avifArrayPush(&item->properties);
    AVIF_CHECK(itemProperty);
    *itemProperty = *metaProperty;
    return itemProperty;
}

static avifResult avifParseMinimizedImageBox(avifMeta * meta,
                                             uint64_t rawOffset,
                                             const uint8_t * raw,
                                             size_t rawLen,
                                             avifBool isAvifAccordingToMinorVersion,
                                             avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[mini]");

    meta->fromMiniBox = AVIF_TRUE;

    uint32_t version;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &version, 2), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(2) version = 0;
    AVIF_CHECKERR(version == 0, AVIF_RESULT_BMFF_PARSE_FAILED);

    // flags
    uint32_t hasExplicitCodecTypes, floatFlag, fullRange, hasAlpha, hasExplicitCicp, hasHdr, hasIcc, hasExif, hasXmp;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasExplicitCodecTypes, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) explicit_codec_types_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &floatFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED);             // bit(1) float_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &fullRange, 1), AVIF_RESULT_BMFF_PARSE_FAILED);             // bit(1) full_range_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasAlpha, 1), AVIF_RESULT_BMFF_PARSE_FAILED);              // bit(1) alpha_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasExplicitCicp, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) explicit_cicp_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasHdr, 1), AVIF_RESULT_BMFF_PARSE_FAILED);          // bit(1) hdr_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasIcc, 1), AVIF_RESULT_BMFF_PARSE_FAILED);          // bit(1) icc_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasExif, 1), AVIF_RESULT_BMFF_PARSE_FAILED);         // bit(1) exif_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &hasXmp, 1), AVIF_RESULT_BMFF_PARSE_FAILED);          // bit(1) xmp_flag;

    uint32_t chromaSubsampling, orientation;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &chromaSubsampling, 2), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(2) chroma_subsampling;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &orientation, 3), AVIF_RESULT_BMFF_PARSE_FAILED);       // bit(3) orientation_minus1;
    ++orientation;

    // Spatial extents
    uint32_t smallDimensionsFlag, width, height;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &smallDimensionsFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) small_dimensions_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &width, smallDimensionsFlag ? 7 : 15),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(small_dimensions_flag ? 7 : 15) width_minus1;
    ++width;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &height, smallDimensionsFlag ? 7 : 15),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(small_dimensions_flag ? 7 : 15) height_minus1;
    ++height;

    // Pixel information
    uint32_t chromaIsHorizontallyCentered = 0, chromaIsVerticallyCentered = 0;
    if (chromaSubsampling == 1 || chromaSubsampling == 2) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &chromaIsHorizontallyCentered, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) chroma_is_horizontally_centered;
    }
    if (chromaSubsampling == 1) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &chromaIsVerticallyCentered, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) chroma_is_vertically_centered;
    }

    uint32_t bitDepth;
    if (floatFlag) {
        // bit(2) bit_depth_log2_minus4;
        return AVIF_RESULT_BMFF_PARSE_FAILED; // Either invalid AVIF or unsupported non-AVIF.
    } else {
        uint32_t highBitDepthFlag;
        AVIF_CHECKERR(avifROStreamReadBits(&s, &highBitDepthFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) high_bit_depth_flag;
        if (highBitDepthFlag) {
            AVIF_CHECKERR(avifROStreamReadBits(&s, &bitDepth, 3), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(3) bit_depth_minus9;
            bitDepth += 9;
        } else {
            bitDepth = 8;
        }
    }

    uint32_t alphaIsPremultiplied = 0;
    if (hasAlpha) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &alphaIsPremultiplied, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) alpha_is_premultiplied;
    }

    // Colour properties
    uint8_t colorPrimaries;
    uint8_t transferCharacteristics;
    uint8_t matrixCoefficients;
    if (hasExplicitCicp) {
        AVIF_CHECKERR(avifROStreamReadBits8(&s, &colorPrimaries, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) colour_primaries;
        AVIF_CHECKERR(avifROStreamReadBits8(&s, &transferCharacteristics, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) transfer_characteristics;
        if (chromaSubsampling != 0) {
            AVIF_CHECKERR(avifROStreamReadBits8(&s, &matrixCoefficients, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) matrix_coefficients;
        } else {
            matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED; // 2
        }
    } else {
        colorPrimaries = hasIcc ? AVIF_COLOR_PRIMARIES_UNSPECIFIED                         // 2
                                : AVIF_COLOR_PRIMARIES_BT709;                              // 1
        transferCharacteristics = hasIcc ? AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED       // 2
                                         : AVIF_TRANSFER_CHARACTERISTICS_SRGB;             // 13
        matrixCoefficients = chromaSubsampling == 0 ? AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED // 2
                                                    : AVIF_MATRIX_COEFFICIENTS_BT601;      // 6
    }

    uint8_t infeType[4];
    uint8_t codecConfigType[4];
    if (hasExplicitCodecTypes) {
        // bit(32) infe_type;
        for (int i = 0; i < 4; ++i) {
            AVIF_CHECKERR(avifROStreamReadBits8(&s, &infeType[i], 8), AVIF_RESULT_BMFF_PARSE_FAILED);
        }
        // bit(32) codec_config_type;
        for (int i = 0; i < 4; ++i) {
            AVIF_CHECKERR(avifROStreamReadBits8(&s, &codecConfigType[i], 8), AVIF_RESULT_BMFF_PARSE_FAILED);
        }
#if defined(AVIF_CODEC_AVM)
        if (!memcmp(infeType, "av02", 4) && !memcmp(codecConfigType, "av2C", 4)) {
            return AVIF_RESULT_NOT_IMPLEMENTED;
        }
#endif
        AVIF_CHECKERR(!memcmp(infeType, "av01", 4) && !memcmp(codecConfigType, "av1C", 4), AVIF_RESULT_BMFF_PARSE_FAILED);
    } else {
        AVIF_CHECKERR(isAvifAccordingToMinorVersion, AVIF_RESULT_BMFF_PARSE_FAILED);
        memcpy(infeType, "av01", 4);
        memcpy(codecConfigType, "av1C", 4);
    }

    // High Dynamic Range properties
    uint32_t hasGainmap = AVIF_FALSE;
    if (hasHdr) {
        // bit(1) gainmap_flag;
        // if (gainmap_flag) {
        //     unsigned int(small_dimensions_flag ? 7 : 15) gainmap_width_minus1;
        //     unsigned int(small_dimensions_flag ? 7 : 15) gainmap_height_minus1;
        //     bit(8) gainmap_matrix_coefficients;
        //     bit(1) gainmap_full_range_flag;
        //     bit(2) gainmap_chroma_subsampling;
        //     if (gainmap_chroma_subsampling == 1 || gainmap_chroma_subsampling == 2)
        //         bit(1) gainmap_chroma_is_horizontally_centered;
        //     if (gainmap_chroma_subsampling == 1)
        //         bit(1) gainmap_chroma_is_vertically_centered;
        //     bit(1) gainmap_float_flag;
        //     if (gainmap_float_flag)
        //         bit(2) gainmap_bit_depth_log2_minus4;
        //     else {
        //         bit(1) gainmap_high_bit_depth_flag;
        //         if (gainmap_high_bit_depth_flag)
        //             bit(3) gainmap_bit_depth_minus9;
        //     }
        //     bit(1) tmap_icc_flag;
        //     bit(1) tmap_explicit_cicp_flag;
        //     if (tmap_explicit_cicp_flag) {
        //         bit(8) tmap_colour_primaries;
        //         bit(8) tmap_transfer_characteristics;
        //         bit(8) tmap_matrix_coefficients;
        //         bit(1) tmap_full_range_flag;
        //     }
        //     else {
        //         tmap_colour_primaries = 1;
        //         tmap_transfer_characteristics = 13;
        //         tmap_matrix_coefficients = 6;
        //         tmap_full_range_flag = 1;
        //     }
        // }
        // bit(1) clli_flag;
        // bit(1) mdcv_flag;
        // bit(1) cclv_flag;
        // bit(1) amve_flag;
        // bit(1) reve_flag;
        // bit(1) ndwt_flag;
        // if (clli_flag)
        //     ContentLightLevel clli;
        // if (mdcv_flag)
        //     MasteringDisplayColourVolume mdcv;
        // if (cclv_flag)
        //     ContentColourVolume cclv;
        // if (amve_flag)
        //     AmbientViewingEnvironment amve;
        // if (reve_flag)
        //     ReferenceViewingEnvironment reve;
        // if (ndwt_flag)
        //     NominalDiffuseWhite ndwt;
        // if (gainmap_flag) {
        //     bit(1) tmap_clli_flag;
        //     bit(1) tmap_mdcv_flag;
        //     bit(1) tmap_cclv_flag;
        //     bit(1) tmap_amve_flag;
        //     bit(1) tmap_reve_flag;
        //     bit(1) tmap_ndwt_flag;
        //     if (tmap_clli_flag)
        //         ContentLightLevel tmap_clli;
        //     if (tmap_mdcv_flag)
        //         MasteringDisplayColourVolume tmap_mdcv;
        //     if (tmap_cclv_flag)
        //         ContentColourVolume tmap_cclv;
        //     if (tmap_amve_flag)
        //         AmbientViewingEnvironment tmap_amve;
        //     if (tmap_reve_flag)
        //         ReferenceViewingEnvironment tmap_reve;
        //     if (tmap_ndwt_flag)
        //         NominalDiffuseWhite tmap_ndwt;
        // }
        return AVIF_RESULT_NOT_IMPLEMENTED;
    }

    // Chunk sizes
    uint32_t fewMetadataBytesFlag = 0, fewCodecConfigBytesFlag = 0, fewItemDataBytesFlag = 0;
    if (hasIcc || hasExif || hasXmp || (hasHdr && hasGainmap)) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &fewMetadataBytesFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) few_metadata_bytes_flag;
    }
    AVIF_CHECKERR(avifROStreamReadBits(&s, &fewCodecConfigBytesFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) few_codec_config_bytes_flag;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &fewItemDataBytesFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) few_item_data_bytes_flag;

    uint32_t iccDataSize = 0;
    if (hasIcc) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &iccDataSize, fewMetadataBytesFlag ? 10 : 20),
                      AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_metadata_bytes_flag ? 10 : 20) icc_data_size_minus1;
        ++iccDataSize;
    }
    // if (hdr_flag && gainmap_flag && tmap_icc_flag)
    //     unsigned int(few_metadata_bytes_flag ? 10 : 20) tmap_icc_data_size_minus1;

    // if (hdr_flag && gainmap_flag)
    //     unsigned int(few_metadata_bytes_flag ? 10 : 20) gainmap_metadata_size;
    // if (hdr_flag && gainmap_flag)
    //     unsigned int(few_item_data_bytes_flag ? 15 : 28) gainmap_item_data_size;
    // if (hdr_flag && gainmap_flag && gainmap_item_data_size > 0)
    //     unsigned int(few_codec_config_bytes_flag ? 3 : 12) gainmap_item_codec_config_size;

    uint32_t mainItemCodecConfigSize, mainItemDataSize;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &mainItemCodecConfigSize, fewCodecConfigBytesFlag ? 3 : 12),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_codec_config_bytes_flag ? 3 : 12) main_item_codec_config_size;
    AVIF_CHECKERR(avifROStreamReadBits(&s, &mainItemDataSize, fewItemDataBytesFlag ? 15 : 28),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_item_data_bytes_flag ? 15 : 28) main_item_data_size_minus1;
    ++mainItemDataSize;

    uint32_t alphaItemCodecConfigSize = 0, alphaItemDataSize = 0;
    if (hasAlpha) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &alphaItemDataSize, fewItemDataBytesFlag ? 15 : 28),
                      AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_item_data_bytes_flag ? 15 : 28) alpha_item_data_size;
    }
    if (hasAlpha && alphaItemDataSize != 0) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &alphaItemCodecConfigSize, fewCodecConfigBytesFlag ? 3 : 12),
                      AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_codec_config_bytes_flag ? 3 : 12) alpha_item_codec_config_size;
    }

    uint32_t exifDataSize = 0;
    if (hasExif) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &exifDataSize, fewMetadataBytesFlag ? 10 : 20),
                      AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_metadata_bytes_flag ? 10 : 20) exif_data_size_minus_one;
        ++exifDataSize;
    }
    uint32_t xmpDataSize = 0;
    if (hasXmp) {
        AVIF_CHECKERR(avifROStreamReadBits(&s, &xmpDataSize, fewMetadataBytesFlag ? 10 : 20),
                      AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(few_metadata_bytes_flag ? 10 : 20) xmp_data_size_minus_one;
        ++xmpDataSize;
    }

    // trailing_bits(); // bit padding till byte alignment
    if (s.numUsedBitsInPartialByte) {
        uint32_t padding;
        AVIF_CHECKERR(avifROStreamReadBits(&s, &padding, 8 - s.numUsedBitsInPartialByte), AVIF_RESULT_BMFF_PARSE_FAILED);
        AVIF_CHECKERR(!padding, AVIF_RESULT_BMFF_PARSE_FAILED); // Only accept zeros as padding.
    }

    // Chunks
    avifCodecConfigurationBox alphaItemCodecConfig = { 0 };
    if (hasAlpha && alphaItemDataSize != 0 && alphaItemCodecConfigSize != 0) {
        // 'av1C' always uses 4 bytes.
        AVIF_CHECKERR(alphaItemCodecConfigSize == 4, AVIF_RESULT_BMFF_PARSE_FAILED);
        AVIF_CHECKERR(avifParseCodecConfiguration(&s, &alphaItemCodecConfig, (const char *)codecConfigType, diag),
                      AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) alpha_item_codec_config[alpha_item_codec_config_size];
    }
    // if (hdr_flag && gainmap_flag && gainmap_item_codec_config_size > 0)
    //     unsigned int(8) gainmap_item_codec_config[gainmap_item_codec_config_size];
    avifCodecConfigurationBox mainItemCodecConfig;
    // 'av1C' always uses 4 bytes.
    AVIF_CHECKERR(mainItemCodecConfigSize == 4, AVIF_RESULT_BMFF_PARSE_FAILED);
    AVIF_CHECKERR(avifParseCodecConfiguration(&s, &mainItemCodecConfig, (const char *)codecConfigType, diag),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) main_item_codec_config[main_item_codec_config_size];

    // Make sure all metadata and coded chunks fit into the 'meta' box whose size is rawLen.
    // There should be no missing nor unused byte.

    AVIF_CHECKERR(avifROStreamRemainingBytes(&s) == (uint64_t)iccDataSize + alphaItemDataSize + mainItemDataSize + exifDataSize + xmpDataSize,
                  AVIF_RESULT_BMFF_PARSE_FAILED);

    // Create the items and properties generated by the MinimizedImageBox.
    // The MinimizedImageBox always creates the same fixed set of properties so that property
    // indices match the specification. Use FreeSpaceBoxes ("skip") as no-op placeholder properties
    // when necessary. There is no need to use placeholder items because item IDs do not have to
    // be contiguous, whereas property indices shall be 1, 2, 3, 4, 5 etc.

    meta->primaryItemID = 1;
    avifDecoderItem * colorItem;
    AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, meta->primaryItemID, &colorItem));
    memcpy(colorItem->type, infeType, 4);
    colorItem->width = width;
    colorItem->height = height;
    colorItem->miniBoxPixelFormat = chromaSubsampling == 0   ? AVIF_PIXEL_FORMAT_YUV400
                                    : chromaSubsampling == 1 ? AVIF_PIXEL_FORMAT_YUV420
                                    : chromaSubsampling == 2 ? AVIF_PIXEL_FORMAT_YUV422
                                                             : AVIF_PIXEL_FORMAT_YUV444;
    if (colorItem->miniBoxPixelFormat == AVIF_PIXEL_FORMAT_YUV422) {
        // In AV1, the chroma_sample_position syntax element is not present for the YUV 4:2:2 format.
        // Assume that AV1 uses the same 4:2:2 chroma sample location as HEVC and VVC (colocated).
        AVIF_CHECKERR(!chromaIsHorizontallyCentered, AVIF_RESULT_BMFF_PARSE_FAILED);
        // chromaIsVerticallyCentered: Ignored unless chroma_subsampling is 1.
        colorItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN;
    } else if (colorItem->miniBoxPixelFormat == AVIF_PIXEL_FORMAT_YUV420) {
        if (chromaIsHorizontallyCentered) {
            // There is no way to describe this with AV1's chroma_sample_position enum besides CSP_UNKNOWN.
            // There is a proposal to assign the reserved value 3 (CSP_RESERVED) to the center chroma sample position.
            colorItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN;
        } else {
            colorItem->miniBoxChromaSamplePosition = chromaIsVerticallyCentered ? AVIF_CHROMA_SAMPLE_POSITION_VERTICAL
                                                                                : AVIF_CHROMA_SAMPLE_POSITION_COLOCATED;
        }
    } else {
        // chromaIsHorizontallyCentered: Ignored unless chroma_subsampling is 1 or 2.
        // chromaIsVerticallyCentered: Ignored unless chroma_subsampling is 1.
        colorItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN;
    }

    avifDecoderItem * alphaItem = NULL;
    if (hasAlpha) {
        AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/2, &alphaItem));
        memcpy(alphaItem->type, infeType, 4);
        alphaItem->width = width;
        alphaItem->height = height;
        alphaItem->miniBoxPixelFormat = AVIF_PIXEL_FORMAT_YUV400;
        alphaItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN;
    }

    // Property with fixed index 1.
    avifProperty * colorCodecConfigProp = avifMetaCreateProperty(meta, (const char *)codecConfigType);
    AVIF_CHECKERR(colorCodecConfigProp, AVIF_RESULT_OUT_OF_MEMORY);
    colorCodecConfigProp->u.av1C = mainItemCodecConfig;
    AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, colorCodecConfigProp), AVIF_RESULT_OUT_OF_MEMORY);

    // Property with fixed index 2.
    avifProperty * ispeProp = avifMetaCreateProperty(meta, "ispe");
    AVIF_CHECKERR(ispeProp, AVIF_RESULT_OUT_OF_MEMORY);
    ispeProp->u.ispe.width = width;
    ispeProp->u.ispe.height = height;
    AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, ispeProp), AVIF_RESULT_OUT_OF_MEMORY);

    // Property with fixed index 3.
    avifProperty * pixiProp = avifMetaCreateProperty(meta, "pixi");
    AVIF_CHECKERR(pixiProp, AVIF_RESULT_OUT_OF_MEMORY);
    pixiProp->u.pixi.planeCount = chromaSubsampling == 0 ? 1 : 3;
    for (uint8_t plane = 0; plane < pixiProp->u.pixi.planeCount; ++plane) {
        pixiProp->u.pixi.planeDepths[plane] = (uint8_t)bitDepth;
    }
    AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, pixiProp), AVIF_RESULT_OUT_OF_MEMORY);

    // Property with fixed index 4.
    avifProperty * colrPropNCLX = avifMetaCreateProperty(meta, "colr");
    AVIF_CHECKERR(colrPropNCLX, AVIF_RESULT_OUT_OF_MEMORY);
    colrPropNCLX->u.colr.hasNCLX = AVIF_TRUE; // colour_type "nclx"
    colrPropNCLX->u.colr.colorPrimaries = (avifColorPrimaries)colorPrimaries;
    colrPropNCLX->u.colr.transferCharacteristics = (avifTransferCharacteristics)transferCharacteristics;
    colrPropNCLX->u.colr.matrixCoefficients = (avifMatrixCoefficients)matrixCoefficients;
    colrPropNCLX->u.colr.range = fullRange ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED;
    AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, colrPropNCLX), AVIF_RESULT_OUT_OF_MEMORY);

    // Property with fixed index 5.
    if (iccDataSize) {
        avifProperty * colrPropICC = avifMetaCreateProperty(meta, "colr");
        AVIF_CHECKERR(colrPropICC, AVIF_RESULT_OUT_OF_MEMORY);
        colrPropICC->u.colr.hasICC = AVIF_TRUE; // colour_type "rICC" or "prof"
        colrPropICC->u.colr.iccOffset = rawOffset + avifROStreamOffset(&s);
        colrPropICC->u.colr.iccSize = (size_t)iccDataSize;
        AVIF_CHECKERR(avifROStreamSkip(&s, colrPropICC->u.colr.iccSize), AVIF_RESULT_BMFF_PARSE_FAILED);
        AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, colrPropICC), AVIF_RESULT_OUT_OF_MEMORY);
    } else {
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder.
    }

    // if (hdr_flag && gainmap_flag && tmap_icc_flag)
    //     unsigned int(8) tmap_icc_data[tmap_icc_data_size_minus1 + 1];
    // if (hdr_flag && gainmap_flag && gainmap_metadata_size > 0)
    //     unsigned int(8) gainmap_metadata[gainmap_metadata_size];

    if (hasAlpha) {
        // Property with fixed index 6.
        avifProperty * alphaCodecConfigProp = avifMetaCreateProperty(meta, (const char *)codecConfigType);
        AVIF_CHECKERR(alphaCodecConfigProp, AVIF_RESULT_OUT_OF_MEMORY);
        alphaCodecConfigProp->u.av1C = alphaItemCodecConfig;
        AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, alphaCodecConfigProp), AVIF_RESULT_OUT_OF_MEMORY);

        // Property with fixed index 7.
        alphaItem->auxForID = colorItem->id;
        colorItem->premByID = alphaIsPremultiplied;
        avifProperty * alphaAuxProp = avifMetaCreateProperty(meta, "auxC");
        AVIF_CHECKERR(alphaAuxProp, AVIF_RESULT_OUT_OF_MEMORY);
        strcpy(alphaAuxProp->u.auxC.auxType, AVIF_URN_ALPHA0);
        AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, alphaAuxProp), AVIF_RESULT_OUT_OF_MEMORY);

        // Property with fixed index 2 (reused).
        AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, ispeProp), AVIF_RESULT_OUT_OF_MEMORY);

        // Property with fixed index 8.
        avifProperty * alphaPixiProp = avifMetaCreateProperty(meta, "pixi");
        AVIF_CHECKERR(alphaPixiProp, AVIF_RESULT_OUT_OF_MEMORY);
        memcpy(alphaPixiProp->type, "pixi", 4);
        alphaPixiProp->u.pixi.planeCount = 1;
        alphaPixiProp->u.pixi.planeDepths[0] = (uint8_t)bitDepth;
        AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, alphaPixiProp), AVIF_RESULT_OUT_OF_MEMORY);
    } else {
        // Placeholders 6, 7 and 8.
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY);
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY);
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY);
    }

    // Same behavior as avifImageExtractExifOrientationToIrotImir().
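    // For reference (derived from the conditions below), the 1-based Exif orientation values map to:
    //     1: no transform            5: irot angle 1 + imir axis 0
    //     2: imir axis 1             6: irot angle 3
    //     3: irot angle 2            7: irot angle 3 + imir axis 0
    //     4: imir axis 0             8: irot angle 1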
    if (orientation == 3 || orientation == 5 || orientation == 6 || orientation == 7 || orientation == 8) {
        // Property with fixed index 9.
        avifProperty * irotProp = avifMetaCreateProperty(meta, "irot");
        AVIF_CHECKERR(irotProp, AVIF_RESULT_OUT_OF_MEMORY);
        irotProp->u.irot.angle = orientation == 3 ? 2 : (orientation == 5 || orientation == 8) ? 1 : 3;
        AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, irotProp), AVIF_RESULT_OUT_OF_MEMORY);
    } else {
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder.
    }
    if (orientation == 2 || orientation == 4 || orientation == 5 || orientation == 7) {
        // Property with fixed index 10.
        avifProperty * imirProp = avifMetaCreateProperty(meta, "imir");
        AVIF_CHECKERR(imirProp, AVIF_RESULT_OUT_OF_MEMORY);
        imirProp->u.imir.axis = orientation == 2 ? 1 : 0;
        AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, imirProp), AVIF_RESULT_OUT_OF_MEMORY);
    } else {
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder.
    }

    // HDR placeholders (unnecessary but added for specification matching).
    for (int i = 11; i <= 29; ++i) {
        AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY);
    }

    // Extents.

    if (hasAlpha) {
        avifExtent * alphaExtent = (avifExtent *)avifArrayPush(&alphaItem->extents);
        AVIF_CHECKERR(alphaExtent, AVIF_RESULT_OUT_OF_MEMORY);
        alphaExtent->offset = rawOffset + avifROStreamOffset(&s);
        alphaExtent->size = (size_t)alphaItemDataSize;
        AVIF_CHECKERR(avifROStreamSkip(&s, alphaExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED);
        alphaItem->size = alphaExtent->size;
    }

    // if (hdr_flag && gainmap_flag && gainmap_item_data_size > 0)
    //     unsigned int(8) gainmap_item_data[gainmap_item_data_size];

    avifExtent * colorExtent = (avifExtent *)avifArrayPush(&colorItem->extents);
    AVIF_CHECKERR(colorExtent, AVIF_RESULT_OUT_OF_MEMORY);
    colorExtent->offset = rawOffset + avifROStreamOffset(&s);
    colorExtent->size = (size_t)mainItemDataSize;
    AVIF_CHECKERR(avifROStreamSkip(&s, colorExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED);
    colorItem->size = colorExtent->size;

    if (hasExif) {
        avifDecoderItem * exifItem;
        AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/3, &exifItem));
        memcpy(exifItem->type, "Exif", 4);
        exifItem->descForID = colorItem->id;
        colorItem->premByID = alphaIsPremultiplied;

        avifExtent * exifExtent = (avifExtent *)avifArrayPush(&exifItem->extents);
        AVIF_CHECKERR(exifExtent, AVIF_RESULT_OUT_OF_MEMORY);
        exifExtent->offset = rawOffset + avifROStreamOffset(&s);
        exifExtent->size = (size_t)exifDataSize; // Does not include unsigned int(32) exif_tiff_header_offset;
        AVIF_CHECKERR(avifROStreamSkip(&s, exifExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED);
        exifItem->size = exifExtent->size;
    }

    if (hasXmp) {
        avifDecoderItem * xmpItem;
        AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/4, &xmpItem));
        memcpy(xmpItem->type, "mime", 4);
        memcpy(xmpItem->contentType.contentType, AVIF_CONTENT_TYPE_XMP, sizeof(AVIF_CONTENT_TYPE_XMP));
        xmpItem->descForID = colorItem->id;
        colorItem->premByID = alphaIsPremultiplied;

        avifExtent * xmpExtent = (avifExtent *)avifArrayPush(&xmpItem->extents);
        AVIF_CHECKERR(xmpExtent, AVIF_RESULT_OUT_OF_MEMORY);
        xmpExtent->offset = rawOffset + avifROStreamOffset(&s);
        xmpExtent->size = (size_t)xmpDataSize;
        AVIF_CHECKERR(avifROStreamSkip(&s, xmpExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED);
        xmpItem->size = xmpExtent->size;
    }
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_MINI

static avifBool avifParseFileTypeBox(avifFileType * ftyp, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{}

static avifBool avifFileTypeHasBrand(avifFileType * ftyp, const char * brand);
static avifBool avifFileTypeIsCompatible(avifFileType * ftyp);

static avifResult avifParse(avifDecoder * decoder)
{}

// ---------------------------------------------------------------------------

static avifBool avifFileTypeHasBrand(avifFileType * ftyp, const char * brand)
{}

static avifBool avifFileTypeIsCompatible(avifFileType * ftyp)
{}

avifBool avifPeekCompatibleFileType(const avifROData * input)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP)
static avifBool avifBrandArrayHasBrand(avifBrandArray * brands, const char * brand)
{}
#endif

// ---------------------------------------------------------------------------

avifDecoder * avifDecoderCreate(void)
{}

static void avifDecoderCleanup(avifDecoder * decoder)
{}

void avifDecoderDestroy(avifDecoder * decoder)
{}

avifResult avifDecoderSetSource(avifDecoder * decoder, avifDecoderSource source)
{}

void avifDecoderSetIO(avifDecoder * decoder, avifIO * io)
{}

avifResult avifDecoderSetIOMemory(avifDecoder * decoder, const uint8_t * data, size_t size)
{}

avifResult avifDecoderSetIOFile(avifDecoder * decoder, const char * filename)
{}

// 0-byte extents are ignored/overwritten during the merge, as they are the signal from helper
// functions that no extent was necessary for this given sample. If both provided extents are
// >0 bytes, this will set dst to be an extent that bounds both supplied extents.
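// For example (made-up numbers): merging {offset=100, size=16} with {offset=160, size=8} yields
// {offset=100, size=68}, i.e. from the smallest offset through the largest end offset (168).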
static avifResult avifExtentMerge(avifExtent * dst, const avifExtent * src)
{}

avifResult avifDecoderNthImageMaxExtent(const avifDecoder * decoder, uint32_t frameIndex, avifExtent * outExtent)
{}

static avifResult avifDecoderPrepareSample(avifDecoder * decoder, avifDecodeSample * sample, size_t partialByteCount)
{}

// Returns AVIF_TRUE if the item should be skipped. Items should be skipped for one of the following reasons:
//  * Size is 0.
//  * Has an essential property that isn't supported by libavif.
//  * Item is not a single image or a grid.
//  * Item is a thumbnail.
static avifBool avifDecoderItemShouldBeSkipped(const avifDecoderItem * item)
{}

avifResult avifDecoderParse(avifDecoder * decoder)
{}

static avifResult avifCodecCreateInternal(avifCodecChoice choice, const avifTile * tile, avifDiagnostics * diag, avifCodec ** codec)
{}

static avifBool avifTilesCanBeDecodedWithSameCodecInstance(avifDecoderData * data)
{}

static avifResult avifDecoderCreateCodecs(avifDecoder * decoder)
{}

// Returns the primary color item if found, or NULL.
static avifDecoderItem * avifMetaFindColorItem(avifMeta * meta)
{}

// Returns AVIF_TRUE if item is an alpha auxiliary item of the parent color
// item.
static avifBool avifDecoderItemIsAlphaAux(const avifDecoderItem * item, uint32_t colorItemId)
{}

// Finds the alpha item whose parent item is colorItem and sets it in the alphaItem output parameter. Returns AVIF_RESULT_OK on
// success. Note that *alphaItem can be NULL even if the return value is AVIF_RESULT_OK. If the colorItem is a grid and the alpha
// item is represented as a set of auxl items to each color tile, then a fake item will be created and *isAlphaItemInInput will be
// set to AVIF_FALSE. In this case, the alpha item merely exists to hold the locations of the alpha tile items. The data of this
// item need not be read and the pixi property cannot be validated. Otherwise, *isAlphaItemInInput will be set to AVIF_TRUE when
// *alphaItem is not NULL.
static avifResult avifMetaFindAlphaItem(avifMeta * meta,
                                        const avifDecoderItem * colorItem,
                                        const avifTileInfo * colorInfo,
                                        avifDecoderItem ** alphaItem,
                                        avifTileInfo * alphaInfo,
                                        avifBool * isAlphaItemInInput)
{}

// On success, this function returns AVIF_RESULT_OK and does the following:
// * If a nclx property was found in |properties|:
//   - Set |*colorPrimaries|, |*transferCharacteristics|, |*matrixCoefficients|
//     and |*yuvRange|.
//   - If cicpSet is not NULL, set |*cicpSet| to AVIF_TRUE.
// This function fails if more than one nclx property is found in |properties|.
// The output parameters may be populated even in case of failure and must be
// ignored.
static avifResult avifReadColorNclxProperty(const avifPropertyArray * properties,
                                            avifColorPrimaries * colorPrimaries,
                                            avifTransferCharacteristics * transferCharacteristics,
                                            avifMatrixCoefficients * matrixCoefficients,
                                            avifRange * yuvRange,
                                            avifBool * cicpSet)
{}
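
// A sketch of the behavior documented above; the actual body is elided in this listing. The
// 'prop'/'count' array members and the 'u.colr' payload field names (hasNCLX, colorPrimaries,
// transferCharacteristics, matrixCoefficients, range) are assumptions based on the
// avifColourInformationBox and avifPropertyArray declarations earlier in this file.
static avifResult avifReadColorNclxPropertySketch(const avifPropertyArray * properties,
                                                  avifColorPrimaries * colorPrimaries,
                                                  avifTransferCharacteristics * transferCharacteristics,
                                                  avifMatrixCoefficients * matrixCoefficients,
                                                  avifRange * yuvRange,
                                                  avifBool * cicpSet)
{
    avifBool nclxSeen = AVIF_FALSE;
    for (uint32_t propertyIndex = 0; propertyIndex < properties->count; ++propertyIndex) {
        const avifProperty * prop = &properties->prop[propertyIndex];
        if (memcmp(prop->type, "colr", 4) != 0 || !prop->u.colr.hasNCLX) {
            continue;
        }
        // More than one nclx colr property is not allowed.
        AVIF_CHECKERR(!nclxSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
        nclxSeen = AVIF_TRUE;
        *colorPrimaries = prop->u.colr.colorPrimaries;
        *transferCharacteristics = prop->u.colr.transferCharacteristics;
        *matrixCoefficients = prop->u.colr.matrixCoefficients;
        *yuvRange = prop->u.colr.range;
        if (cicpSet != NULL) {
            *cicpSet = AVIF_TRUE;
        }
    }
    return AVIF_RESULT_OK;
}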

// On success, this function returns AVIF_RESULT_OK and does the following:
// * If a colr property was found in |properties|:
//   - Read the icc data into |icc| from |io|.
//   - Set the CICP values as documented in avifReadColorNclxProperty().
// This function fails if more than one icc or nclx property is found in
// |properties|. The output parameters may be populated even in case of failure
// and must be ignored (and the |icc| object may need to be freed).
static avifResult avifReadColorProperties(avifIO * io,
                                          const avifPropertyArray * properties,
                                          avifRWData * icc,
                                          avifColorPrimaries * colorPrimaries,
                                          avifTransferCharacteristics * transferCharacteristics,
                                          avifMatrixCoefficients * matrixCoefficients,
                                          avifRange * yuvRange,
                                          avifBool * cicpSet)
{}

#if defined(AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP)
// Finds a 'tmap' (tone mapped image item) box associated with the given 'colorItem'.
// If found, fills 'toneMappedImageItem' and sets 'gainMapItemID' to the id of the gain map
// item associated with the box. Otherwise, sets 'toneMappedImageItem' to NULL.
// Returns AVIF_RESULT_OK if no errors were encountered (whether or not a tmap box was found).
// Assumes that there is a single tmap item, and not, e.g., a grid of tmap items.
// TODO(maryla): add support for files with multiple tmap items if it gets allowed by the spec.
static avifResult avifDecoderDataFindToneMappedImageItem(const avifDecoderData * data,
                                                         const avifDecoderItem * colorItem,
                                                         avifDecoderItem ** toneMappedImageItem,
                                                         uint32_t * gainMapItemID)
{}

// Finds a 'tmap' (tone mapped image item) box associated with the given 'colorItem',
// then finds the associated gain map image.
// If found, fills 'toneMappedImageItem', 'gainMapItem' and 'gainMapCodecType', and
// allocates and fills metadata in decoder->image->gainMap.
// Otherwise, sets 'toneMappedImageItem' and 'gainMapItem' to NULL.
// Returns AVIF_RESULT_OK if no errors were encountered (whether or not a gain map was found).
// Assumes that there is a single tmap item, and not, e.g., a grid of tmap items.
static avifResult avifDecoderFindGainMapItem(const avifDecoder * decoder,
                                             const avifDecoderItem * colorItem,
                                             avifDecoderItem ** toneMappedImageItem,
                                             avifDecoderItem ** gainMapItem,
                                             avifCodecType * gainMapCodecType)
{}

static avifResult aviDecoderCheckGainMapProperties(avifDecoder * decoder, const avifPropertyArray * gainMapProperties)
{}
#endif // AVIF_ENABLE_EXPERIMENTAL_GAIN_MAP

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
// Finds a 'sato' Sample Transform derived image item box.
// If found, fills 'sampleTransformItem'. Otherwise, sets 'sampleTransformItem' to NULL.
// Returns AVIF_RESULT_OK on success (whether or not a 'sato' box was found).
// Assumes that there is a single 'sato' item.
// Assumes that the 'sato' item is not the primary item and that both the primary item and 'sato'
// are in the same 'altr' group.
// TODO(yguyon): Check instead of assuming.
static avifResult avifDecoderDataFindSampleTransformImageItem(avifDecoderData * data, avifDecoderItem ** sampleTransformItem)
{
    for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
        avifDecoderItem * item = data->meta->items.item[itemIndex];
        if (!item->size || item->hasUnsupportedEssentialProperty || item->thumbnailForID != 0) {
            continue;
        }
        if (!memcmp(item->type, "sato", 4)) {
            *sampleTransformItem = item;
            return AVIF_RESULT_OK;
        }
    }
    *sampleTransformItem = NULL;
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM

static avifResult avifDecoderGenerateImageTiles(avifDecoder * decoder, avifTileInfo * info, avifDecoderItem * item, avifItemCategory itemCategory)
{}

// Populates depth, yuvFormat and yuvChromaSamplePosition fields on 'image' based on data from the codec config property (e.g. "av1C").
static avifResult avifReadCodecConfigProperty(avifImage * image, const avifPropertyArray * properties, avifCodecType codecType)
{}
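
// A sketch of the mapping described above; the actual body is elided in this listing. The real
// function would first locate the configuration property (e.g. with avifPropertyArrayFind() and
// avifGetConfigurationPropertyName()); this helper only shows how the avifCodecConfigurationBox
// fields from the public header translate into image fields.
static void avifApplyCodecConfigSketch(avifImage * image, const avifCodecConfigurationBox * config)
{
    // Bit depth: 8 by default, 10 with high_bitdepth, 12 when twelve_bit is also set.
    image->depth = 8;
    if (config->highBitdepth) {
        image->depth = config->twelveBit ? 12 : 10;
    }

    // Chroma format from the monochrome and subsampling flags.
    if (config->monochrome) {
        image->yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
    } else if (config->chromaSubsamplingX && config->chromaSubsamplingY) {
        image->yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
    } else if (config->chromaSubsamplingX) {
        image->yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
    } else {
        image->yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
    }

    image->yuvChromaSamplePosition = (avifChromaSamplePosition)config->chromaSamplePosition;
}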

avifResult avifDecoderReset(avifDecoder * decoder)
{}

static avifResult avifDecoderPrepareTiles(avifDecoder * decoder, uint32_t nextImageIndex, const avifTileInfo * info)
{}

static avifResult avifImageLimitedToFullAlpha(avifImage * image)
{}

static avifResult avifGetErrorForItemCategory(avifItemCategory itemCategory)
{}

static avifResult avifDecoderDecodeTiles(avifDecoder * decoder, uint32_t nextImageIndex, avifTileInfo * info)
{}

// Returns AVIF_FALSE if there is currently a partially decoded frame.
static avifBool avifDecoderDataFrameFullyDecoded(const avifDecoderData * data)
{}
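
// A sketch of the documented check; the actual body is elided in this listing. The
// 'decodedTileCount' field and the AVIF_ITEM_CATEGORY_COUNT bound on 'tileInfos' are assumptions:
// a frame is considered fully decoded once every item category has all of its tiles decoded.
static avifBool avifDecoderDataFrameFullyDecodedSketch(const avifDecoderData * data)
{
    for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
        if (data->tileInfos[c].decodedTileCount != data->tileInfos[c].tileCount) {
            return AVIF_FALSE; // this category still has pending tiles
        }
    }
    return AVIF_TRUE;
}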

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
static avifResult avifDecoderApplySampleTransform(const avifDecoder * decoder, avifImage * dstImage)
{
    if (dstImage->depth != decoder->data->meta->sampleTransformDepth) {
        AVIF_ASSERT_OR_RETURN(dstImage->yuvPlanes[0] != NULL);
        AVIF_ASSERT_OR_RETURN(dstImage->imageOwnsYUVPlanes);

        // Use a temporary buffer because dstImage may point to decoder->image, which could be an input image.
        avifImage * dstImageWithCorrectDepth =
            avifImageCreate(dstImage->width, dstImage->height, decoder->data->meta->sampleTransformDepth, dstImage->yuvFormat);
        AVIF_CHECKERR(dstImageWithCorrectDepth != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        avifResult result =
            avifImageAllocatePlanes(dstImageWithCorrectDepth, dstImage->alphaPlane != NULL ? AVIF_PLANES_ALL : AVIF_PLANES_YUV);
        if (result == AVIF_RESULT_OK) {
            result = avifDecoderApplySampleTransform(decoder, dstImageWithCorrectDepth);
            if (result == AVIF_RESULT_OK) {
                // Keep the same dstImage object rather than swapping decoder->image, in case the user already accessed it.
                avifImageFreePlanes(dstImage, AVIF_PLANES_ALL);
                dstImage->depth = dstImageWithCorrectDepth->depth;
                avifImageStealPlanes(dstImage, dstImageWithCorrectDepth, AVIF_PLANES_ALL);
            }
        }
        avifImageDestroy(dstImageWithCorrectDepth);
        return result;
    }

    for (avifBool alpha = AVIF_FALSE; alpha <= (decoder->alphaPresent ? AVIF_TRUE : AVIF_FALSE); ++alpha) {
        AVIF_ASSERT_OR_RETURN(decoder->data->sampleTransformNumInputImageItems <= AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS);
        const avifImage * inputImages[AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS];
        for (uint32_t i = 0; i < decoder->data->sampleTransformNumInputImageItems; ++i) {
            avifItemCategory category = decoder->data->sampleTransformInputImageItems[i];
            if (category == AVIF_ITEM_COLOR) {
                inputImages[i] = decoder->image;
            } else {
                AVIF_ASSERT_OR_RETURN(category >= AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_COLOR &&
                                      category < AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_COLOR +
                                                     AVIF_SAMPLE_TRANSFORM_MAX_NUM_EXTRA_INPUT_IMAGE_ITEMS);
                if (alpha) {
                    category += AVIF_SAMPLE_TRANSFORM_MAX_NUM_EXTRA_INPUT_IMAGE_ITEMS;
                }
                const avifTileInfo * tileInfo = &decoder->data->tileInfos[category];
                AVIF_CHECKERR(tileInfo->tileCount == 1, AVIF_RESULT_NOT_IMPLEMENTED); // TODO(yguyon): Implement Sample Transform grids
                inputImages[i] = decoder->data->tiles.tile[tileInfo->firstTileIndex].image;
                AVIF_ASSERT_OR_RETURN(inputImages[i] != NULL);
            }
        }
        AVIF_CHECKRES(avifImageApplyExpression(dstImage,
                                               AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32,
                                               &decoder->data->meta->sampleTransformExpression,
                                               decoder->data->sampleTransformNumInputImageItems,
                                               inputImages,
                                               alpha ? AVIF_PLANES_A : AVIF_PLANES_YUV));
    }
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM

avifResult avifDecoderNextImage(avifDecoder * decoder)
{}

avifResult avifDecoderNthImageTiming(const avifDecoder * decoder, uint32_t frameIndex, avifImageTiming * outTiming)
{}

avifResult avifDecoderNthImage(avifDecoder * decoder, uint32_t frameIndex)
{}

avifBool avifDecoderIsKeyframe(const avifDecoder * decoder, uint32_t frameIndex)
{}

uint32_t avifDecoderNearestKeyframe(const avifDecoder * decoder, uint32_t frameIndex)
{}

// Returns the number of available rows in decoder->image given a color or alpha subimage.
static uint32_t avifGetDecodedRowCount(const avifDecoder * decoder, const avifTileInfo * info, const avifImage * image)
{}

uint32_t avifDecoderDecodedRowCount(const avifDecoder * decoder)
{}

avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image)
{}

avifResult avifDecoderReadMemory(avifDecoder * decoder, avifImage * image, const uint8_t * data, size_t size)
{}

avifResult avifDecoderReadFile(avifDecoder * decoder, avifImage * image, const char * filename)
{}