summaryrefslogtreecommitdiff
path: root/chromium/third_party/libavif/src
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/third_party/libavif/src')
-rw-r--r--chromium/third_party/libavif/src/.github/workflows/ci.yml22
-rw-r--r--chromium/third_party/libavif/src/.travis.yml29
-rw-r--r--chromium/third_party/libavif/src/CHANGELOG.md52
-rw-r--r--chromium/third_party/libavif/src/CMakeLists.txt61
-rw-r--r--chromium/third_party/libavif/src/LICENSE55
-rw-r--r--chromium/third_party/libavif/src/README.md19
-rw-r--r--chromium/third_party/libavif/src/apps/avifdec.c106
-rw-r--r--chromium/third_party/libavif/src/apps/avifdump.c90
-rw-r--r--chromium/third_party/libavif/src/apps/avifenc.c487
-rw-r--r--chromium/third_party/libavif/src/apps/shared/avifjpeg.c3
-rw-r--r--chromium/third_party/libavif/src/apps/shared/avifjpeg.h2
-rw-r--r--chromium/third_party/libavif/src/apps/shared/avifpng.c3
-rw-r--r--chromium/third_party/libavif/src/apps/shared/avifpng.h2
-rw-r--r--chromium/third_party/libavif/src/apps/shared/avifutil.c6
-rw-r--r--chromium/third_party/libavif/src/apps/shared/avifutil.h14
-rw-r--r--chromium/third_party/libavif/src/apps/shared/y4m.c332
-rw-r--r--chromium/third_party/libavif/src/apps/shared/y4m.h8
-rw-r--r--chromium/third_party/libavif/src/contrib/CMakeLists.txt4
-rw-r--r--chromium/third_party/libavif/src/contrib/README.md9
-rw-r--r--chromium/third_party/libavif/src/contrib/gdk-pixbuf/CMakeLists.txt28
-rw-r--r--chromium/third_party/libavif/src/contrib/gdk-pixbuf/loader.c395
-rwxr-xr-xchromium/third_party/libavif/src/ext/aom.cmd6
-rw-r--r--chromium/third_party/libavif/src/ext/dav1d.cmd6
-rwxr-xr-xchromium/third_party/libavif/src/ext/libgav1.cmd6
-rw-r--r--chromium/third_party/libavif/src/ext/rav1e.cmd6
-rw-r--r--chromium/third_party/libavif/src/include/avif/avif.h174
-rw-r--r--chromium/third_party/libavif/src/include/avif/internal.h102
-rw-r--r--chromium/third_party/libavif/src/src/avif.c86
-rw-r--r--chromium/third_party/libavif/src/src/codec_aom.c384
-rw-r--r--chromium/third_party/libavif/src/src/codec_dav1d.c13
-rw-r--r--chromium/third_party/libavif/src/src/codec_libgav1.c11
-rw-r--r--chromium/third_party/libavif/src/src/codec_rav1e.c298
-rw-r--r--chromium/third_party/libavif/src/src/colr.c6
-rw-r--r--chromium/third_party/libavif/src/src/mem.c6
-rw-r--r--chromium/third_party/libavif/src/src/obu.c360
-rw-r--r--chromium/third_party/libavif/src/src/read.c956
-rw-r--r--chromium/third_party/libavif/src/src/reformat.c366
-rw-r--r--chromium/third_party/libavif/src/src/stream.c48
-rw-r--r--chromium/third_party/libavif/src/src/write.c838
39 files changed, 3836 insertions, 1563 deletions
diff --git a/chromium/third_party/libavif/src/.github/workflows/ci.yml b/chromium/third_party/libavif/src/.github/workflows/ci.yml
index 0a64fbac63c..fb807dc83c9 100644
--- a/chromium/third_party/libavif/src/.github/workflows/ci.yml
+++ b/chromium/third_party/libavif/src/.github/workflows/ci.yml
@@ -4,30 +4,26 @@ jobs:
build:
runs-on: ubuntu-18.04
env:
- CC: gcc-8
- CXX: g++-8
+ CC: gcc-10
+ CXX: g++-10
steps:
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: '3.8'
- - name: Install nasm
- env:
- LINK: http://debian-archive.trafficmanager.net/debian/pool/main/n/nasm
- NASM_VERSION: 2.14.02-1
- NASM_SHA256: >-
- 5225d0654783134ae616f56ce8649e4df09cba191d612a0300cfd0494bb5a3ef
- run: |
- curl -O "$LINK/nasm_${NASM_VERSION}_amd64.deb"
- echo "$NASM_SHA256 nasm_${NASM_VERSION}_amd64.deb" | sha256sum --check
- sudo dpkg -i "nasm_${NASM_VERSION}_amd64.deb"
- name: Install dependencies
run: |
DEBIAN_FRONTEND=noninteractive sudo apt-get update
- DEBIAN_FRONTEND=noninteractive sudo apt-get install -y ninja-build gcc-8 g++-8
+ DEBIAN_FRONTEND=noninteractive sudo apt-get install -y ninja-build gcc-10 g++-10
pip install --upgrade pip
pip install setuptools
pip install meson
+ - name: Install nasm
+ run: |
+ curl -L https://download.videolan.org/contrib/nasm/nasm-2.14.tar.gz | tar xvz
+ cd nasm-2.14
+ ./configure && make -j2 && sudo make install
+ nasm --version
- uses: actions/checkout@v2
- name: Setup aom
working-directory: ./ext
diff --git a/chromium/third_party/libavif/src/.travis.yml b/chromium/third_party/libavif/src/.travis.yml
index d7d1a788547..4670c97c973 100644
--- a/chromium/third_party/libavif/src/.travis.yml
+++ b/chromium/third_party/libavif/src/.travis.yml
@@ -27,9 +27,9 @@ before_script:
- cd ..
- mkdir build
- cd build
- - cmake -DCMAKE_BUILD_TYPE=$config -DBUILD_SHARED_LIBS=OFF -DAVIF_CODEC_AOM=ON -DAVIF_LOCAL_AOM=ON -DAVIF_CODEC_DAV1D=ON -DAVIF_LOCAL_DAV1D=ON -DAVIF_CODEC_RAV1E=ON -DAVIF_LOCAL_RAV1E=ON -DAVIF_CODEC_LIBGAV1=ON -DAVIF_LOCAL_LIBGAV1=ON -DAVIF_BUILD_EXAMPLES=ON -DAVIF_BUILD_APPS=ON -DAVIF_BUILD_TESTS=ON ..
+ - cmake -G Ninja -DCMAKE_BUILD_TYPE=$config -DBUILD_SHARED_LIBS=OFF -DAVIF_CODEC_AOM=ON -DAVIF_LOCAL_AOM=ON -DAVIF_CODEC_DAV1D=ON -DAVIF_LOCAL_DAV1D=ON -DAVIF_CODEC_RAV1E=ON -DAVIF_LOCAL_RAV1E=ON -DAVIF_CODEC_LIBGAV1=ON -DAVIF_LOCAL_LIBGAV1=ON -DAVIF_BUILD_EXAMPLES=ON -DAVIF_BUILD_APPS=ON -DAVIF_BUILD_TESTS=ON ..
script:
- - make
+ - ninja
matrix:
include:
@@ -45,3 +45,28 @@ matrix:
- name: "Clang Release"
compiler: clang
config: Release
+ - &freebsd_common
+ name: "Clang 10 + system libraries"
+ os: freebsd
+ compiler: clang
+ before_install:
+ - | # FreeBSD 12.2 has Clang 10 but TravisCI uses FreeBSD 12.1
+ if [[ $(${CC:-clang} --version) =~ 8.0 ]]; then
+ export CC=clang10 CXX=clang++10
+ sudo pkg install -y llvm10
+ fi
+ before_script:
+ - sudo pkg install -y aom dav1d librav1e ninja
+ - $WRAPPER cmake -B build -G Ninja -DAVIF_{CODEC_{AOM,DAV1D,RAV1E},BUILD_{APPS,TESTS}}=ON
+ script:
+ - $WRAPPER cmake --build build
+ - <<: *freebsd_common
+ name: "GCC 10 + system libraries"
+ compiler: gcc
+ before_install:
+ - sudo pkg upgrade -y gcc10-devel
+ - <<: *freebsd_common
+ name: "Clang Static Analyzer"
+ env: WRAPPER="scan-build10 --status-bugs"
+ before_install:
+ - sudo pkg install -y llvm10
diff --git a/chromium/third_party/libavif/src/CHANGELOG.md b/chromium/third_party/libavif/src/CHANGELOG.md
index 43e8c1b48e9..211584eba6a 100644
--- a/chromium/third_party/libavif/src/CHANGELOG.md
+++ b/chromium/third_party/libavif/src/CHANGELOG.md
@@ -6,6 +6,40 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
+## [0.8.1] - 2020-08-05
+
+### Added
+* Add `ignoreAlpha` field to avifRGBImage (linkmauve)
+* Save support in gdk-pixbuf component (novomesk)
+
+### Changed
+* Only ever create one iref box, filled with multiple cdsc boxes (#247)
+* Fix incorrect 16-to-8 monochrome YUV conversion
+* Make decoding optional in CMake, like encoding is
+* Include avif INTERFACE_INCLUDE_DIRECTORIES first (cryptomilk)
+* Set C standard to C99, adjust flags for dav1d (1480c1)
+* Minor cleanup/fixes in reformat.c (wantehchang)
+* Fix a crash in the gdk-pixbuf loader, removed unnecessary asserts (novomesk)
+
+## [0.8.0] - 2020-07-14
+
+### Added
+* Monochrome (YUV400) support **
+ * All encoding/decoding and internal memory savings are done/functional
+ * libaom has a bug in chroma_check() which crashes when encoding monochrome, to be fixed in a future (>v2.0.0) version
+ * rav1e didn't implement CS400 until rav1e v0.4.0
+ * libavif safely falls back to YUV420 when these earlier codec versions are detected
+ * NOTE: If you want to do heavy monochrome testing, wait for newer versions to libaom/rav1e!
+* Image sequence encoding support
+ * Required medium-sized refactors in the codec layers
+ * Image sequences (tracks) now fully support all metadata properly (Exif/XMP/transforms)
+ * avifenc can now encode a series of same-sized images with a consistent framerate, or each with their own custom duration
+* Bilinear upsampling support
+* avifenc: Add --ignore-icc, which avoids embedding the ICC profile found in the source image
+* avifdec: Add --info, which attempts to decode all frames and display their basic info (merge of avifdump)
+* avifenc: add --tilerowslog2 and --tilecolslog2 (wantehchang)
+* Added `contrib` dir for any unofficially supported code contributions (e.g. gdk-pixbuf)
+
### Changed
* CICP Refactor (breaking change!)
* Remove most references to "NCLX", as it is mostly an implementation detail, and the values are really from MPEG-CICP
@@ -19,8 +53,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* Added comments explaining various decisions and citing standards
* Removed ICC inspection code regarding chroma-derived mtxCoeffs; this was overdesigned. Now just honor the assoc. colorPrimaries enum
* Reworked all examples in the README to reflect the new state of things, and clean out some cruft
-* Switch libaom.cmd to point at v2.0.0-rc1
+ * Harvest CICP from AV1 bitstream as a fallback in avifDecoderParse() if nclx box is absent
+* All data other than actual pixel data should be available and valid after a call to avifDecoderParse()
+* Refactor avifDecoder internal structures to properly handle meta boxes in trak boxes (see avifMeta)
+* Update libaom.cmd to point at the v2.0.0 tag
+* Update dav1d.cmd to point at the 0.7.1 tag
* Re-enable cpu-used=7+ in codec_aom when libaom major version > 1
+* Memory allocation failures now cause libavif to abort the process (rather than undefined behavior)
+* Fix to maintain alpha range when decoding an image grid with alpha
+* Improvements to avifyuv to show drift when yuv and rgb depths differ
+* Remove any references to (incorrect) "av01" brand (wantehchang)
+* Set up libaom to use reduced_still_picture_header (wantehchang)
+* Use libaom cpu_used 6 in "good quality" usage mode (wantehchang)
+* Update avifBitsReadUleb128 with latest dav1d code (wantehchang)
+* Set encoder chroma sample position (wantehchang)
## [0.7.3] - 2020-05-04
### Added
@@ -422,7 +468,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Constants `AVIF_VERSION`, `AVIF_VERSION_MAJOR`, `AVIF_VERSION_MINOR`, `AVIF_VERSION_PATCH`
- `avifVersion()` function
-[Unreleased]: https://github.com/AOMediaCodec/libavif/compare/v0.7.3...HEAD
+[Unreleased]: https://github.com/AOMediaCodec/libavif/compare/v0.8.1...HEAD
+[0.8.1]: https://github.com/AOMediaCodec/libavif/compare/v0.8.0...v0.8.1
+[0.8.0]: https://github.com/AOMediaCodec/libavif/compare/v0.7.3...v0.8.0
[0.7.3]: https://github.com/AOMediaCodec/libavif/compare/v0.7.2...v0.7.3
[0.7.2]: https://github.com/AOMediaCodec/libavif/compare/v0.7.1...v0.7.2
[0.7.1]: https://github.com/AOMediaCodec/libavif/compare/v0.7.0...v0.7.1
diff --git a/chromium/third_party/libavif/src/CMakeLists.txt b/chromium/third_party/libavif/src/CMakeLists.txt
index 969c6a3bbc2..df8c538749c 100644
--- a/chromium/third_party/libavif/src/CMakeLists.txt
+++ b/chromium/third_party/libavif/src/CMakeLists.txt
@@ -7,7 +7,10 @@ cmake_minimum_required(VERSION 3.5)
# and find_package()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules")
-project(libavif LANGUAGES C CXX VERSION 0.7.3)
+project(libavif LANGUAGES C VERSION 0.8.1)
+
+# Set C99 as the default
+set(CMAKE_C_STANDARD 99)
# SOVERSION scheme: MAJOR.MINOR.PATCH
# If there was an incompatible interface change:
@@ -16,9 +19,9 @@ project(libavif LANGUAGES C CXX VERSION 0.7.3)
# Increment MINOR. Set PATCH to 0
# If the source code was changed, but there were no interface changes:
# Increment PATCH.
-set(LIBRARY_VERSION_MAJOR 4)
+set(LIBRARY_VERSION_MAJOR 6)
set(LIBRARY_VERSION_MINOR 0)
-set(LIBRARY_VERSION_PATCH 1)
+set(LIBRARY_VERSION_PATCH 0)
set(LIBRARY_VERSION "${LIBRARY_VERSION_MAJOR}.${LIBRARY_VERSION_MINOR}.${LIBRARY_VERSION_PATCH}")
set(LIBRARY_SOVERSION ${LIBRARY_VERSION_MAJOR})
@@ -34,6 +37,10 @@ option(AVIF_LOCAL_DAV1D "Build the dav1d codec by providing your own copy of the
option(AVIF_LOCAL_LIBGAV1 "Build the libgav1 codec by providing your own copy of the repo in ext/libgav1 (see Local Builds in README)" OFF)
option(AVIF_LOCAL_RAV1E "Build the rav1e codec by providing your own copy of the repo in ext/rav1e (see Local Builds in README)" OFF)
+if(AVIF_LOCAL_LIBGAV1)
+ enable_language(CXX)
+endif()
+
# ---------------------------------------------------------------------------------------
# This insanity is for people embedding libavif or making fully static or Windows builds.
# Any proper unix environment should ignore these entire following blocks.
@@ -89,9 +96,9 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
-Wno-missing-noreturn
-Wno-padded
-Wno-sign-conversion
+ -Wno-error=c11-extensions
)
elseif(CMAKE_C_COMPILER_ID MATCHES "GNU")
- add_definitions(-std=gnu99) # Enforce C99 for gcc
MESSAGE(STATUS "libavif: Enabling warnings for GCC")
add_definitions(-Werror -Wall -Wextra)
elseif(CMAKE_C_COMPILER_ID MATCHES "MSVC")
@@ -118,6 +125,7 @@ set(AVIF_SRCS
src/avif.c
src/colr.c
src/mem.c
+ src/obu.c
src/rawdata.c
src/read.c
src/reformat.c
@@ -208,12 +216,9 @@ if(AVIF_CODEC_RAV1E)
)
if(AVIF_LOCAL_RAV1E)
- set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/rav1e/target/release/rav1e.lib")
+ set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/rav1e/target/release/${CMAKE_STATIC_LIBRARY_PREFIX}rav1e${CMAKE_STATIC_LIBRARY_SUFFIX}")
if(NOT EXISTS "${LIB_FILENAME}")
- set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/rav1e/target/release/librav1e.a")
- if(NOT EXISTS "${LIB_FILENAME}")
- message(FATAL_ERROR "libavif: compiled rav1e library is missing (in ext/rav1e/target/release), bailing out")
- endif()
+ message(FATAL_ERROR "libavif: compiled rav1e library is missing (in ext/rav1e/target/release), bailing out")
endif()
set(AVIF_CODEC_INCLUDES ${AVIF_CODEC_INCLUDES}
@@ -244,11 +249,7 @@ if(AVIF_CODEC_AOM)
src/codec_aom.c
)
if(AVIF_LOCAL_AOM)
- if(WIN32)
- set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/aom/build.libavif/aom.lib")
- else()
- set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/aom/build.libavif/libaom.a")
- endif()
+ set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/aom/build.libavif/${CMAKE_STATIC_LIBRARY_PREFIX}aom${CMAKE_STATIC_LIBRARY_SUFFIX}")
if(NOT EXISTS "${LIB_FILENAME}")
message(FATAL_ERROR "libavif: ${LIB_FILENAME} is missing, bailing out")
endif()
@@ -270,7 +271,7 @@ if(AVIF_CODEC_AOM)
endif()
if(NOT AVIF_CODEC_AOM AND NOT AVIF_CODEC_DAV1D AND NOT AVIF_CODEC_LIBGAV1)
- message(FATAL_ERROR "libavif: No decoding library is enabled, bailing out.")
+ message(WARNING "libavif: No decoding library is enabled.")
endif()
add_library(avif ${AVIF_SRCS})
@@ -323,7 +324,13 @@ if(AVIF_BUILD_APPS)
set_target_properties(avifenc PROPERTIES LINKER_LANGUAGE "CXX")
endif()
target_link_libraries(avifenc avif ${AVIF_PLATFORM_LIBRARIES} ${ZLIB_LIBRARY} ${PNG_LIBRARY} ${JPEG_LIBRARY})
- target_include_directories(avifenc PRIVATE apps/shared ${ZLIB_INCLUDE_DIR} ${PNG_PNG_INCLUDE_DIR} ${JPEG_INCLUDE_DIR})
+ target_include_directories(avifenc
+ PRIVATE
+ $<TARGET_PROPERTY:avif,INTERFACE_INCLUDE_DIRECTORIES>
+ ${CMAKE_CURRENT_SOURCE_DIR}/apps/shared
+ ${ZLIB_INCLUDE_DIR}
+ ${PNG_PNG_INCLUDE_DIR}
+ ${JPEG_INCLUDE_DIR})
add_executable(avifdec
apps/avifdec.c
@@ -337,18 +344,13 @@ if(AVIF_BUILD_APPS)
set_target_properties(avifdec PROPERTIES LINKER_LANGUAGE "CXX")
endif()
target_link_libraries(avifdec avif ${AVIF_PLATFORM_LIBRARIES} ${ZLIB_LIBRARY} ${PNG_LIBRARY} ${JPEG_LIBRARY})
- target_include_directories(avifdec PRIVATE apps/shared ${ZLIB_INCLUDE_DIR} ${PNG_PNG_INCLUDE_DIR} ${JPEG_INCLUDE_DIR})
-
- add_executable(avifdump
- apps/avifdump.c
-
- apps/shared/avifutil.c
- )
- if(AVIF_LOCAL_LIBGAV1)
- set_target_properties(avifdump PROPERTIES LINKER_LANGUAGE "CXX")
- endif()
- target_link_libraries(avifdump avif ${AVIF_PLATFORM_LIBRARIES})
- target_include_directories(avifdump PRIVATE apps/shared)
+ target_include_directories(avifdec
+ PRIVATE
+ $<TARGET_PROPERTY:avif,INTERFACE_INCLUDE_DIRECTORIES>
+ ${CMAKE_CURRENT_SOURCE_DIR}/apps/shared
+ ${ZLIB_INCLUDE_DIR}
+ ${PNG_PNG_INCLUDE_DIR}
+ ${JPEG_INCLUDE_DIR})
if(NOT SKIP_INSTALL_APPS AND NOT SKIP_INSTALL_ALL)
install(TARGETS avifenc avifdec
@@ -393,6 +395,7 @@ configure_file(libavif.pc.cmake ${CMAKE_CURRENT_BINARY_DIR}/libavif.pc @ONLY)
if(NOT SKIP_INSTALL_LIBRARIES AND NOT SKIP_INSTALL_ALL)
install(TARGETS avif
EXPORT ${PROJECT_NAME}-config
+ RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}"
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}"
)
@@ -455,4 +458,4 @@ if(WIN32)
endif()
endif()
-# ---------------------------------------------------------------------------------------
+add_subdirectory(contrib)
diff --git a/chromium/third_party/libavif/src/LICENSE b/chromium/third_party/libavif/src/LICENSE
index a4165881521..e8318ef745a 100644
--- a/chromium/third_party/libavif/src/LICENSE
+++ b/chromium/third_party/libavif/src/LICENSE
@@ -47,6 +47,34 @@ THE SOFTWARE.
------------------------------------------------------------------------------
+Files: src/obu.c
+
+Copyright © 2018-2019, VideoLAN and dav1d authors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+
Files: apps/shared/iccjpeg.*
In plain English:
@@ -113,3 +141,30 @@ We are required to state that
"The Graphics Interchange Format(c) is the Copyright property of
CompuServe Incorporated. GIF(sm) is a Service Mark property of
CompuServe Incorporated."
+
+------------------------------------------------------------------------------
+
+Files: contrib/gdk-pixbuf/*
+
+Copyright 2020 Emmanuel Gil Peyrot. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/chromium/third_party/libavif/src/README.md b/chromium/third_party/libavif/src/README.md
index 620b57a589e..f394a402250 100644
--- a/chromium/third_party/libavif/src/README.md
+++ b/chromium/third_party/libavif/src/README.md
@@ -235,25 +235,26 @@ avifImageAllocatePlanes(image, AVIF_PLANES_YUV);
... image->yuvRowBytes;
// Option 2: Convert from interleaved RGB(A)/BGR(A) using a libavif-allocated buffer.
-uint32_t rgbDepth = ...; // [8, 10, 12, 16]; Does not need to match image->depth.
- // If >8, rgb->pixels is uint16_t*
-avifRGBFormat rgbFormat = AVIF_RGB_FORMAT_RGBA; // See choices in avif.h
-avifRGBImage * rgb = avifRGBImageCreate(image->width, image->height, rgbDepth, rgbFormat);
+avifRGBImage rgb;
+avifRGBImageSetDefaults(&rgb, image);
+rgb.depth = ...; // [8, 10, 12, 16]; Does not need to match image->depth.
+ // If >8, rgb->pixels is uint16_t*
+rgb.format = ...; // See choices in avif.h
+avifRGBImageAllocatePixels(&rgb);
... rgb->pixels; // fill these pixels; all channel data must be full range
... rgb->rowBytes;
-avifImageRGBToYUV(image, rgb); // if alpha is present, it will also be copied/converted
-avifRGBImageDestroy(rgb);
+avifImageRGBToYUV(image, &rgb); // if alpha is present, it will also be copied/converted
+avifRGBImageFreePixels(&rgb);
// Option 3: Convert directly from your own pre-existing interleaved RGB(A)/BGR(A) buffer
avifRGBImage rgb;
-rgb.width = image->width;
-rgb.height = image->height;
+avifRGBImageSetDefaults(&rgb, image);
rgb.depth = ...; // [8, 10, 12, 16]; Does not need to match image->depth.
// If >8, rgb->pixels is uint16_t*
rgb.format = ...; // See choices in avif.h, match to your buffer's pixel format
rgb.pixels = ...; // Point at your RGB(A)/BGR(A) pixels here
rgb.rowBytes = ...;
-avifImageRGBToYUV(image, rgb); // if alpha is present, it will also be copied/converted
+avifImageRGBToYUV(image, &rgb); // if alpha is present, it will also be copied/converted
// no need to cleanup avifRGBImage
// Optional: Populate alpha plane
diff --git a/chromium/third_party/libavif/src/apps/avifdec.c b/chromium/third_party/libavif/src/apps/avifdec.c
index e0468ad7e56..94bec871ab5 100644
--- a/chromium/third_party/libavif/src/apps/avifdec.c
+++ b/chromium/third_party/libavif/src/apps/avifdec.c
@@ -8,6 +8,7 @@
#include "avifutil.h"
#include "y4m.h"
+#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
@@ -24,15 +25,85 @@
static void syntax(void)
{
printf("Syntax: avifdec [options] input.avif output.[jpg|jpeg|png|y4m]\n");
+ printf(" avifdec --info input.avif\n");
printf("Options:\n");
printf(" -h,--help : Show syntax help\n");
printf(" -c,--codec C : AV1 codec to use (choose from versions list below)\n");
printf(" -d,--depth D : Output depth [8,16]. (PNG only; For y4m, depth is retained, and JPEG is always 8bpc)\n");
printf(" -q,--quality Q : Output quality [0-100]. (JPEG only, default: %d)\n", DEFAULT_JPEG_QUALITY);
+ printf(" -u,--upsampling U : Chroma upsampling (for 420/422). bilinear (default) or nearest\n");
+ printf(" -i,--info : Decode all frames and display all image information instead of saving to disk\n");
printf("\n");
avifPrintVersions();
}
+static int info(const char * inputFilename)
+{
+ FILE * inputFile = fopen(inputFilename, "rb");
+ if (!inputFile) {
+ fprintf(stderr, "Cannot open file for read: %s\n", inputFilename);
+ return 1;
+ }
+ fseek(inputFile, 0, SEEK_END);
+ size_t inputFileSize = ftell(inputFile);
+ fseek(inputFile, 0, SEEK_SET);
+
+ if (inputFileSize < 1) {
+ fprintf(stderr, "File too small: %s\n", inputFilename);
+ fclose(inputFile);
+ return 1;
+ }
+
+ avifRWData raw = AVIF_DATA_EMPTY;
+ avifRWDataRealloc(&raw, inputFileSize);
+ if (fread(raw.data, 1, inputFileSize, inputFile) != inputFileSize) {
+ fprintf(stderr, "Failed to read " AVIF_FMT_ZU " bytes: %s\n", inputFileSize, inputFilename);
+ fclose(inputFile);
+ avifRWDataFree(&raw);
+ return 1;
+ }
+
+ fclose(inputFile);
+ inputFile = NULL;
+
+ avifDecoder * decoder = avifDecoderCreate();
+ avifResult result = avifDecoderParse(decoder, (avifROData *)&raw);
+ if (result == AVIF_RESULT_OK) {
+ printf("Image decoded: %s\n", inputFilename);
+
+ int frameIndex = 0;
+ avifBool firstImage = AVIF_TRUE;
+ while (avifDecoderNextImage(decoder) == AVIF_RESULT_OK) {
+ if (firstImage) {
+ firstImage = AVIF_FALSE;
+ avifImageDump(decoder->image);
+
+ printf(" * %" PRIu64 " timescales per second, %2.2f seconds (%" PRIu64 " timescales), %d frame%s\n",
+ decoder->timescale,
+ decoder->duration,
+ decoder->durationInTimescales,
+ decoder->imageCount,
+ (decoder->imageCount == 1) ? "" : "s");
+ printf(" * Frames:\n");
+ }
+
+ printf(" * Decoded frame [%d] [pts %2.2f (%" PRIu64 " timescales)] [duration %2.2f (%" PRIu64 " timescales)]\n",
+ frameIndex,
+ decoder->imageTiming.pts,
+ decoder->imageTiming.ptsInTimescales,
+ decoder->imageTiming.duration,
+ decoder->imageTiming.durationInTimescales);
+ ++frameIndex;
+ }
+ } else {
+ printf("ERROR: Failed to decode image: %s\n", avifResultToString(result));
+ }
+
+ avifRWDataFree(&raw);
+ avifDecoderDestroy(decoder);
+ return 0;
+}
+
int main(int argc, char * argv[])
{
const char * inputFilename = NULL;
@@ -40,6 +111,8 @@ int main(int argc, char * argv[])
int requestedDepth = 0;
int jpegQuality = DEFAULT_JPEG_QUALITY;
avifCodecChoice codecChoice = AVIF_CODEC_CHOICE_AUTO;
+ avifBool infoOnly = AVIF_FALSE;
+ avifChromaUpsampling chromaUpsampling = AVIF_CHROMA_UPSAMPLING_BILINEAR;
if (argc < 2) {
syntax();
@@ -81,6 +154,18 @@ int main(int argc, char * argv[])
} else if (jpegQuality > 100) {
jpegQuality = 100;
}
+ } else if (!strcmp(arg, "-u") || !strcmp(arg, "--upsampling")) {
+ NEXTARG();
+ if (!strcmp(arg, "bilinear")) {
+ chromaUpsampling = AVIF_CHROMA_UPSAMPLING_BILINEAR;
+ } else if (!strcmp(arg, "nearest")) {
+ chromaUpsampling = AVIF_CHROMA_UPSAMPLING_NEAREST;
+ } else {
+ fprintf(stderr, "ERROR: invalid upsampling: %s\n", arg);
+ return 1;
+ }
+ } else if (!strcmp(arg, "-i") || !strcmp(arg, "--info")) {
+ infoOnly = AVIF_TRUE;
} else {
// Positional argument
if (!inputFilename) {
@@ -97,11 +182,24 @@ int main(int argc, char * argv[])
++argIndex;
}
- if (!inputFilename || !outputFilename) {
+ if (!inputFilename) {
syntax();
return 1;
}
+ if (infoOnly) {
+ if (!inputFilename || outputFilename) {
+ syntax();
+ return 1;
+ }
+ return info(inputFilename);
+ } else {
+ if (!inputFilename || !outputFilename) {
+ syntax();
+ return 1;
+ }
+ }
+
FILE * inputFile = fopen(inputFilename, "rb");
if (!inputFile) {
fprintf(stderr, "Cannot open file for read: %s\n", inputFilename);
@@ -120,7 +218,7 @@ int main(int argc, char * argv[])
avifRWData raw = AVIF_DATA_EMPTY;
avifRWDataRealloc(&raw, inputFileSize);
if (fread(raw.data, 1, inputFileSize, inputFile) != inputFileSize) {
- fprintf(stderr, "Failed to read %zu bytes: %s\n", inputFileSize, inputFilename);
+ fprintf(stderr, "Failed to read " AVIF_FMT_ZU " bytes: %s\n", inputFileSize, inputFilename);
fclose(inputFile);
avifRWDataFree(&raw);
return 1;
@@ -149,11 +247,11 @@ int main(int argc, char * argv[])
returnCode = 1;
}
} else if (outputFormat == AVIF_APP_FILE_FORMAT_JPEG) {
- if (!avifJPEGWrite(avif, outputFilename, jpegQuality)) {
+ if (!avifJPEGWrite(avif, outputFilename, jpegQuality, chromaUpsampling)) {
returnCode = 1;
}
} else if (outputFormat == AVIF_APP_FILE_FORMAT_PNG) {
- if (!avifPNGWrite(avif, outputFilename, requestedDepth)) {
+ if (!avifPNGWrite(avif, outputFilename, requestedDepth, chromaUpsampling)) {
returnCode = 1;
}
} else {
diff --git a/chromium/third_party/libavif/src/apps/avifdump.c b/chromium/third_party/libavif/src/apps/avifdump.c
deleted file mode 100644
index 737f51523c8..00000000000
--- a/chromium/third_party/libavif/src/apps/avifdump.c
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2019 Joe Drago. All rights reserved.
-// SPDX-License-Identifier: BSD-2-Clause
-
-#include "avif/avif.h"
-
-#include "avifutil.h"
-
-#include <inttypes.h>
-#include <stdio.h>
-#include <string.h>
-
-static int syntax(void)
-{
- printf("Syntax: avifdump input.avif\n");
- return 0;
-}
-
-int main(int argc, char * argv[])
-{
- const char * inputFilename = NULL;
- if (argc != 2) {
- syntax();
- return 0;
- }
- inputFilename = argv[1];
-
- FILE * inputFile = fopen(inputFilename, "rb");
- if (!inputFile) {
- fprintf(stderr, "Cannot open file for read: %s\n", inputFilename);
- return 1;
- }
- fseek(inputFile, 0, SEEK_END);
- size_t inputFileSize = ftell(inputFile);
- fseek(inputFile, 0, SEEK_SET);
-
- if (inputFileSize < 1) {
- fprintf(stderr, "File too small: %s\n", inputFilename);
- fclose(inputFile);
- return 1;
- }
-
- avifRWData raw = AVIF_DATA_EMPTY;
- avifRWDataRealloc(&raw, inputFileSize);
- if (fread(raw.data, 1, inputFileSize, inputFile) != inputFileSize) {
- fprintf(stderr, "Failed to read %zu bytes: %s\n", inputFileSize, inputFilename);
- fclose(inputFile);
- avifRWDataFree(&raw);
- return 1;
- }
-
- fclose(inputFile);
- inputFile = NULL;
-
- avifDecoder * decoder = avifDecoderCreate();
- avifResult result = avifDecoderParse(decoder, (avifROData *)&raw);
- if (result == AVIF_RESULT_OK) {
- printf("Image decoded: %s\n", inputFilename);
-
- int frameIndex = 0;
- avifBool firstImage = AVIF_TRUE;
- while (avifDecoderNextImage(decoder) == AVIF_RESULT_OK) {
- if (firstImage) {
- firstImage = AVIF_FALSE;
- avifImageDump(decoder->image);
-
- printf(" * %" PRIu64 " timescales per second, %2.2f seconds (%" PRIu64 " timescales), %d frame%s\n",
- decoder->timescale,
- decoder->duration,
- decoder->durationInTimescales,
- decoder->imageCount,
- (decoder->imageCount == 1) ? "" : "s");
- printf(" * Frames:\n");
- }
-
- printf(" * Decoded frame [%d] [pts %2.2f (%" PRIu64 " timescales)] [duration %2.2f (%" PRIu64 " timescales)]\n",
- frameIndex,
- decoder->imageTiming.pts,
- decoder->imageTiming.ptsInTimescales,
- decoder->imageTiming.duration,
- decoder->imageTiming.durationInTimescales);
- ++frameIndex;
- }
- } else {
- printf("ERROR: Failed to decode image: %s\n", avifResultToString(result));
- }
-
- avifRWDataFree(&raw);
- avifDecoderDestroy(decoder);
- return 0;
-}
diff --git a/chromium/third_party/libavif/src/apps/avifenc.c b/chromium/third_party/libavif/src/apps/avifenc.c
index 17496d1ca31..e288f713f2f 100644
--- a/chromium/third_party/libavif/src/apps/avifenc.c
+++ b/chromium/third_party/libavif/src/apps/avifenc.c
@@ -12,28 +12,54 @@
#include <stdlib.h>
#include <string.h>
+#if defined(_WIN32)
+// for setmode()
+#include <fcntl.h>
+#include <io.h>
+#endif
+
#define NEXTARG() \
if (((argIndex + 1) == argc) || (argv[argIndex + 1][0] == '-')) { \
fprintf(stderr, "%s requires an argument.", arg); \
- return 1; \
+ goto cleanup; \
} \
arg = argv[++argIndex]
+typedef struct avifInputFile
+{
+ const char * filename;
+ int duration;
+} avifInputFile;
+static avifInputFile stdinFile;
+
+typedef struct avifInput
+{
+ avifInputFile * files;
+ int filesCount;
+ int fileIndex;
+ struct y4mFrameIterator * frameIter;
+ avifPixelFormat requestedFormat;
+ int requestedDepth;
+ avifBool useStdin;
+} avifInput;
+
static void syntax(void)
{
printf("Syntax: avifenc [options] input.[jpg|jpeg|png|y4m] output.avif\n");
printf("Options:\n");
printf(" -h,--help : Show syntax help\n");
printf(" -j,--jobs J : Number of jobs (worker threads, default: 1)\n");
+ printf(" -o,--output FILENAME : Instead of using the last filename given as output, use this filename\n");
printf(" -l,--lossless : Set all defaults to encode losslessly, and emit warnings when settings/input don't allow for it\n");
- printf(" -d,--depth D : Output depth [8,10,12]. (JPEG/PNG only; For y4m, depth is retained)\n");
- printf(" -y,--yuv FORMAT : Output format [default=444, 422, 420]. (JPEG/PNG only; For y4m, format is retained)\n");
+ printf(" -d,--depth D : Output depth [8,10,12]. (JPEG/PNG only; For y4m or stdin, depth is retained)\n");
+ printf(" -y,--yuv FORMAT : Output format [default=444, 422, 420, 400]. (JPEG/PNG only; For y4m or stdin, format is retained)\n");
+ printf(" --stdin : Read y4m frames from stdin instead of files; no input filenames allowed, must set before offering output filename\n");
printf(" --cicp,--nclx P/T/M : Set CICP values (nclx colr box) (3 raw numbers, use -r to set range flag)\n");
printf(" P = enum avifColorPrimaries\n");
printf(" T = enum avifTransferCharacteristics\n");
printf(" M = enum avifMatrixCoefficients\n");
printf(" (use 2 for any you wish to leave unspecified)\n");
- printf(" -r,--range RANGE : YUV range [limited or l, full or f]. (JPEG/PNG only, default: full; For y4m, range is retained)\n");
+ printf(" -r,--range RANGE : YUV range [limited or l, full or f]. (JPEG/PNG only, default: full; For y4m or stdin, range is retained)\n");
printf(" --min Q : Set min quantizer for color (%d-%d, where %d is lossless)\n",
AVIF_QUANTIZER_BEST_QUALITY,
AVIF_QUANTIZER_WORST_QUALITY,
@@ -50,10 +76,16 @@ static void syntax(void)
AVIF_QUANTIZER_BEST_QUALITY,
AVIF_QUANTIZER_WORST_QUALITY,
AVIF_QUANTIZER_LOSSLESS);
+ printf(" --tilerowslog2 R : Set log2 of number of tile rows (0-6, default: 0)\n");
+ printf(" --tilecolslog2 C : Set log2 of number of tile columns (0-6, default: 0)\n");
printf(" -s,--speed S : Encoder speed (%d-%d, slowest-fastest, 'default' or 'd' for codec internal defaults. default speed: 8)\n",
AVIF_SPEED_SLOWEST,
AVIF_SPEED_FASTEST);
printf(" -c,--codec C : AV1 codec to use (choose from versions list below)\n");
+ printf(" --duration D : Set all following frame durations (in timescales) to D; default 1. Can be set multiple times (before supplying each filename)\n");
+ printf(" --timescale,--fps V : Set the timescale to V. If all frames are 1 timescale in length, this is equivalent to frames per second\n");
+ printf(" -k,--keyframe INTERVAL : Set the forced keyframe interval (maximum frames between keyframes). Set to 0 to disable (default).\n");
+ printf(" --ignore-icc : If the input file contains an embedded ICC profile, ignore it (no-op if absent)\n");
printf(" --pasp H,V : Add pasp property (aspect ratio). H=horizontal spacing, V=vertical spacing\n");
printf(" --clap WN,WD,HN,HD,HON,HOD,VON,VOD: Add clap property (clean aperture). Width, Height, HOffset, VOffset (in num/denom pairs)\n");
printf(" --irot ANGLE : Add irot property (rotation). [0-3], makes (90 * ANGLE) degree rotation anti-clockwise\n");
@@ -125,23 +157,91 @@ static int parseU32List(uint32_t output[8], const char * arg)
return index;
}
-int main(int argc, char * argv[])
+static avifInputFile * avifInputGetNextFile(avifInput * input)
{
- const char * inputFilename = NULL;
- const char * outputFilename = NULL;
+ if (input->useStdin) {
+ ungetc(fgetc(stdin), stdin); // Kick stdin to force EOF
+ if (feof(stdin)) {
+ return NULL;
+ }
+ return &stdinFile;
+ }
+
+ if (input->fileIndex >= input->filesCount) {
+ return NULL;
+ }
+ return &input->files[input->fileIndex];
+}
+
+static avifAppFileFormat avifInputReadImage(avifInput * input, avifImage * image, uint32_t * outDepth)
+{
+ if (input->useStdin) {
+ if (feof(stdin)) {
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+ if (!y4mRead(image, NULL, &input->frameIter)) {
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+ return AVIF_APP_FILE_FORMAT_Y4M;
+ }
+
+ if (input->fileIndex >= input->filesCount) {
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+
+ avifAppFileFormat nextInputFormat = avifGuessFileFormat(input->files[input->fileIndex].filename);
+ if (nextInputFormat == AVIF_APP_FILE_FORMAT_Y4M) {
+ if (!y4mRead(image, input->files[input->fileIndex].filename, &input->frameIter)) {
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+ if (outDepth) {
+ *outDepth = image->depth;
+ }
+ } else if (nextInputFormat == AVIF_APP_FILE_FORMAT_JPEG) {
+ if (!avifJPEGRead(image, input->files[input->fileIndex].filename, input->requestedFormat, input->requestedDepth)) {
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+ if (outDepth) {
+ *outDepth = 8;
+ }
+ } else if (nextInputFormat == AVIF_APP_FILE_FORMAT_PNG) {
+ if (!avifPNGRead(image, input->files[input->fileIndex].filename, input->requestedFormat, input->requestedDepth, outDepth)) {
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+ } else {
+ fprintf(stderr, "Unrecognized file format: %s\n", input->files[input->fileIndex].filename);
+ return AVIF_APP_FILE_FORMAT_UNKNOWN;
+ }
+
+ if (!input->frameIter) {
+ ++input->fileIndex;
+ }
+ return nextInputFormat;
+}
+
+int main(int argc, char * argv[])
+{
if (argc < 2) {
syntax();
return 1;
}
+ const char * outputFilename = NULL;
+
+ avifInput input;
+ memset(&input, 0, sizeof(input));
+ input.files = malloc(sizeof(avifInputFile) * argc);
+ input.requestedFormat = AVIF_PIXEL_FORMAT_YUV444;
+
+ int returnCode = 0;
int jobs = 1;
- avifPixelFormat requestedFormat = AVIF_PIXEL_FORMAT_YUV444;
- int requestedDepth = 0;
int minQuantizer = AVIF_QUANTIZER_BEST_QUALITY;
int maxQuantizer = 10; // "High Quality", but not lossless
int minQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS;
int maxQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS;
+ int tileRowsLog2 = 0;
+ int tileColsLog2 = 0;
int speed = 8;
int paspCount = 0;
uint32_t paspValues[8]; // only the first two are used
@@ -152,11 +252,22 @@ int main(int argc, char * argv[])
avifCodecChoice codecChoice = AVIF_CODEC_CHOICE_AUTO;
avifRange requestedRange = AVIF_RANGE_FULL;
avifBool lossless = AVIF_FALSE;
+ avifBool ignoreICC = AVIF_FALSE;
avifEncoder * encoder = NULL;
-
+ avifImage * image = NULL;
+ avifImage * nextImage = NULL;
+ avifRWData raw = AVIF_DATA_EMPTY;
+ int duration = 1; // in timescales, stored per-inputFile (see avifInputFile)
+ int timescale = 1; // 1 fps by default
+ int keyframeInterval = 0;
+ avifBool cicpExplicitlySet = AVIF_FALSE;
+
+ // By default, the color profile itself is unspecified, so CP/TC are set (to 2) accordingly.
+ // However, if the end-user doesn't specify any CICP, we will convert to YUV using BT709
+ // coefficients anyway (as MC:2 falls back to MC:1), so we might as well signal it explicitly.
avifColorPrimaries colorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED;
avifTransferCharacteristics transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED;
- avifMatrixCoefficients matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED;
+ avifMatrixCoefficients matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_BT709;
int argIndex = 1;
while (argIndex < argc) {
@@ -164,32 +275,44 @@ int main(int argc, char * argv[])
if (!strcmp(arg, "-h") || !strcmp(arg, "--help")) {
syntax();
- return 0;
+ goto cleanup;
} else if (!strcmp(arg, "-j") || !strcmp(arg, "--jobs")) {
NEXTARG();
jobs = atoi(arg);
if (jobs < 1) {
jobs = 1;
}
+ } else if (!strcmp(arg, "--stdin")) {
+ input.useStdin = AVIF_TRUE;
+ } else if (!strcmp(arg, "-o") || !strcmp(arg, "--output")) {
+ NEXTARG();
+ outputFilename = arg;
} else if (!strcmp(arg, "-d") || !strcmp(arg, "--depth")) {
NEXTARG();
- requestedDepth = atoi(arg);
- if ((requestedDepth != 8) && (requestedDepth != 10) && (requestedDepth != 12)) {
+ input.requestedDepth = atoi(arg);
+ if ((input.requestedDepth != 8) && (input.requestedDepth != 10) && (input.requestedDepth != 12)) {
fprintf(stderr, "ERROR: invalid depth: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
} else if (!strcmp(arg, "-y") || !strcmp(arg, "--yuv")) {
NEXTARG();
if (!strcmp(arg, "444")) {
- requestedFormat = AVIF_PIXEL_FORMAT_YUV444;
+ input.requestedFormat = AVIF_PIXEL_FORMAT_YUV444;
} else if (!strcmp(arg, "422")) {
- requestedFormat = AVIF_PIXEL_FORMAT_YUV422;
+ input.requestedFormat = AVIF_PIXEL_FORMAT_YUV422;
} else if (!strcmp(arg, "420")) {
- requestedFormat = AVIF_PIXEL_FORMAT_YUV420;
+ input.requestedFormat = AVIF_PIXEL_FORMAT_YUV420;
+ } else if (!strcmp(arg, "400")) {
+ input.requestedFormat = AVIF_PIXEL_FORMAT_YUV400;
} else {
fprintf(stderr, "ERROR: invalid format: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
+ } else if (!strcmp(arg, "-k") || !strcmp(arg, "--keyframe")) {
+ NEXTARG();
+ keyframeInterval = atoi(arg);
} else if (!strcmp(arg, "--min")) {
NEXTARG();
minQuantizer = atoi(arg);
@@ -226,15 +349,35 @@ int main(int argc, char * argv[])
if (maxQuantizerAlpha > AVIF_QUANTIZER_WORST_QUALITY) {
maxQuantizerAlpha = AVIF_QUANTIZER_WORST_QUALITY;
}
+ } else if (!strcmp(arg, "--tilerowslog2")) {
+ NEXTARG();
+ tileRowsLog2 = atoi(arg);
+ if (tileRowsLog2 < 0) {
+ tileRowsLog2 = 0;
+ }
+ if (tileRowsLog2 > 6) {
+ tileRowsLog2 = 6;
+ }
+ } else if (!strcmp(arg, "--tilecolslog2")) {
+ NEXTARG();
+ tileColsLog2 = atoi(arg);
+ if (tileColsLog2 < 0) {
+ tileColsLog2 = 0;
+ }
+ if (tileColsLog2 > 6) {
+ tileColsLog2 = 6;
+ }
} else if (!strcmp(arg, "--cicp") || !strcmp(arg, "--nclx")) {
NEXTARG();
int cicp[3];
if (!parseCICP(cicp, arg)) {
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
colorPrimaries = (avifColorPrimaries)cicp[0];
transferCharacteristics = (avifTransferCharacteristics)cicp[1];
matrixCoefficients = (avifMatrixCoefficients)cicp[2];
+ cicpExplicitlySet = AVIF_TRUE;
} else if (!strcmp(arg, "-r") || !strcmp(arg, "--range")) {
NEXTARG();
if (!strcmp(arg, "limited") || !strcmp(arg, "l")) {
@@ -243,7 +386,8 @@ int main(int argc, char * argv[])
requestedRange = AVIF_RANGE_FULL;
} else {
fprintf(stderr, "ERROR: Unknown range: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
} else if (!strcmp(arg, "-s") || !strcmp(arg, "--speed")) {
NEXTARG();
@@ -258,148 +402,174 @@ int main(int argc, char * argv[])
speed = AVIF_SPEED_SLOWEST;
}
}
+ } else if (!strcmp(arg, "--duration")) {
+ NEXTARG();
+ duration = atoi(arg);
+ if (duration < 1) {
+ fprintf(stderr, "ERROR: Invalid duration: %d\n", duration);
+ returnCode = 1;
+ goto cleanup;
+ }
+ } else if (!strcmp(arg, "--timescale") || !strcmp(arg, "--fps")) {
+ NEXTARG();
+ timescale = atoi(arg);
+ if (timescale < 1) {
+ fprintf(stderr, "ERROR: Invalid timescale: %d\n", timescale);
+ returnCode = 1;
+ goto cleanup;
+ }
} else if (!strcmp(arg, "-c") || !strcmp(arg, "--codec")) {
NEXTARG();
codecChoice = avifCodecChoiceFromName(arg);
if (codecChoice == AVIF_CODEC_CHOICE_AUTO) {
fprintf(stderr, "ERROR: Unrecognized codec: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
} else {
const char * codecName = avifCodecName(codecChoice, AVIF_CODEC_FLAG_CAN_ENCODE);
if (codecName == NULL) {
fprintf(stderr, "ERROR: AV1 Codec cannot encode: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
}
+ } else if (!strcmp(arg, "--ignore-icc")) {
+ ignoreICC = AVIF_TRUE;
} else if (!strcmp(arg, "--pasp")) {
NEXTARG();
paspCount = parseU32List(paspValues, arg);
if (paspCount != 2) {
fprintf(stderr, "ERROR: Invalid pasp values: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
} else if (!strcmp(arg, "--clap")) {
NEXTARG();
clapCount = parseU32List(clapValues, arg);
if (clapCount != 8) {
fprintf(stderr, "ERROR: Invalid clap values: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
} else if (!strcmp(arg, "--irot")) {
NEXTARG();
irotAngle = (uint8_t)atoi(arg);
if (irotAngle > 3) {
fprintf(stderr, "ERROR: Invalid irot angle: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
} else if (!strcmp(arg, "--imir")) {
NEXTARG();
imirAxis = (uint8_t)atoi(arg);
if (imirAxis > 1) {
fprintf(stderr, "ERROR: Invalid imir axis: %s\n", arg);
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
} else if (!strcmp(arg, "-l") || !strcmp(arg, "--lossless")) {
lossless = AVIF_TRUE;
// Set defaults, and warn later on if anything looks incorrect
- requestedFormat = AVIF_PIXEL_FORMAT_YUV444; // don't subsample when using AVIF_MATRIX_COEFFICIENTS_IDENTITY
- minQuantizer = AVIF_QUANTIZER_LOSSLESS; // lossless
- maxQuantizer = AVIF_QUANTIZER_LOSSLESS; // lossless
- minQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS; // lossless
- maxQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS; // lossless
- codecChoice = AVIF_CODEC_CHOICE_AOM; // rav1e doesn't support lossless transform yet:
- // https://github.com/xiph/rav1e/issues/151
- requestedRange = AVIF_RANGE_FULL; // avoid limited range
+ input.requestedFormat = AVIF_PIXEL_FORMAT_YUV444; // don't subsample when using AVIF_MATRIX_COEFFICIENTS_IDENTITY
+ minQuantizer = AVIF_QUANTIZER_LOSSLESS; // lossless
+ maxQuantizer = AVIF_QUANTIZER_LOSSLESS; // lossless
+ minQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS; // lossless
+ maxQuantizerAlpha = AVIF_QUANTIZER_LOSSLESS; // lossless
+ codecChoice = AVIF_CODEC_CHOICE_AOM; // rav1e doesn't support lossless transform yet:
+ // https://github.com/xiph/rav1e/issues/151
+ requestedRange = AVIF_RANGE_FULL; // avoid limited range
matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_IDENTITY; // this is key for lossless
} else {
// Positional argument
- if (!inputFilename) {
- inputFilename = arg;
- } else if (!outputFilename) {
- outputFilename = arg;
- } else {
- fprintf(stderr, "Too many positional arguments: %s\n", arg);
- syntax();
- return 1;
- }
+ input.files[input.filesCount].filename = arg;
+ input.files[input.filesCount].duration = duration;
+ ++input.filesCount;
}
++argIndex;
}
- if (!inputFilename || !outputFilename) {
+ stdinFile.filename = "(stdin)";
+ stdinFile.duration = duration; // TODO: Allow arbitrary frame durations from stdin?
+
+ if (!outputFilename) {
+ if (((input.useStdin && (input.filesCount == 1)) || (!input.useStdin && (input.filesCount > 1)))) {
+ --input.filesCount;
+ outputFilename = input.files[input.filesCount].filename;
+ }
+ }
+
+ if (!outputFilename || (input.useStdin && (input.filesCount > 0)) || (!input.useStdin && (input.filesCount < 1))) {
syntax();
- return 1;
+ returnCode = 1;
+ goto cleanup;
}
- int returnCode = 0;
- avifImage * avif = avifImageCreateEmpty();
- avifRWData raw = AVIF_DATA_EMPTY;
+#if defined(_WIN32)
+ if (input.useStdin) {
+ setmode(fileno(stdin), O_BINARY);
+ }
+#endif
+
+ image = avifImageCreateEmpty();
+ // Set these in advance so any upcoming RGB -> YUV use the proper coefficients
+ image->colorPrimaries = colorPrimaries;
+ image->transferCharacteristics = transferCharacteristics;
+ image->matrixCoefficients = matrixCoefficients;
+ image->yuvRange = requestedRange;
+
+ avifInputFile * firstFile = avifInputGetNextFile(&input);
uint32_t sourceDepth = 0;
- avifBool sourceWasRGB = AVIF_TRUE;
- avifAppFileFormat inputFormat = avifGuessFileFormat(inputFilename);
+ avifAppFileFormat inputFormat = avifInputReadImage(&input, image, &sourceDepth);
if (inputFormat == AVIF_APP_FILE_FORMAT_UNKNOWN) {
- fprintf(stderr, "Cannot determine input file extension: %s\n", inputFilename);
+ fprintf(stderr, "Cannot determine input file format: %s\n", firstFile ? firstFile->filename : "(stdin)");
returnCode = 1;
goto cleanup;
}
+ avifBool sourceWasRGB = (inputFormat != AVIF_APP_FILE_FORMAT_Y4M);
- // Set these in advance so any upcoming RGB -> YUV use the proper coefficients
- avif->colorPrimaries = colorPrimaries;
- avif->transferCharacteristics = transferCharacteristics;
- avif->matrixCoefficients = matrixCoefficients;
- avif->yuvRange = requestedRange;
+ printf("Successfully loaded: %s\n", firstFile->filename);
- if (inputFormat == AVIF_APP_FILE_FORMAT_Y4M) {
- if (!y4mRead(avif, inputFilename)) {
- returnCode = 1;
- goto cleanup;
- }
- sourceDepth = avif->depth;
- sourceWasRGB = AVIF_FALSE;
- } else if (inputFormat == AVIF_APP_FILE_FORMAT_JPEG) {
- if (!avifJPEGRead(avif, inputFilename, requestedFormat, requestedDepth)) {
- returnCode = 1;
- goto cleanup;
- }
- sourceDepth = 8;
- } else if (inputFormat == AVIF_APP_FILE_FORMAT_PNG) {
- if (!avifPNGRead(avif, inputFilename, requestedFormat, requestedDepth, &sourceDepth)) {
- returnCode = 1;
- goto cleanup;
+ if ((image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_IDENTITY) && (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV444)) {
+ // matrixCoefficients was likely set to AVIF_MATRIX_COEFFICIENTS_IDENTITY as a side effect
+ // of --lossless, and Identity is only valid with YUV444. Set this back to the default.
+ image->matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_BT709;
+
+ if (cicpExplicitlySet) {
+ // Only warn if someone explicitly asked for identity.
+ printf("WARNING: matrixCoefficients may not be set to identity(0) when subsampling. Resetting MC to defaults.\n");
}
- } else {
- fprintf(stderr, "Unrecognized file extension: %s\n", inputFilename);
- returnCode = 1;
- goto cleanup;
}
- printf("Successfully loaded: %s\n", inputFilename);
+
+ if (ignoreICC) {
+ avifImageSetProfileICC(image, NULL, 0);
+ }
if (paspCount == 2) {
- avif->transformFlags |= AVIF_TRANSFORM_PASP;
- avif->pasp.hSpacing = paspValues[0];
- avif->pasp.vSpacing = paspValues[1];
+ image->transformFlags |= AVIF_TRANSFORM_PASP;
+ image->pasp.hSpacing = paspValues[0];
+ image->pasp.vSpacing = paspValues[1];
}
if (clapCount == 8) {
- avif->transformFlags |= AVIF_TRANSFORM_CLAP;
- avif->clap.widthN = clapValues[0];
- avif->clap.widthD = clapValues[1];
- avif->clap.heightN = clapValues[2];
- avif->clap.heightD = clapValues[3];
- avif->clap.horizOffN = clapValues[4];
- avif->clap.horizOffD = clapValues[5];
- avif->clap.vertOffN = clapValues[6];
- avif->clap.vertOffD = clapValues[7];
+ image->transformFlags |= AVIF_TRANSFORM_CLAP;
+ image->clap.widthN = clapValues[0];
+ image->clap.widthD = clapValues[1];
+ image->clap.heightN = clapValues[2];
+ image->clap.heightD = clapValues[3];
+ image->clap.horizOffN = clapValues[4];
+ image->clap.horizOffD = clapValues[5];
+ image->clap.vertOffN = clapValues[6];
+ image->clap.vertOffD = clapValues[7];
}
if (irotAngle != 0xff) {
- avif->transformFlags |= AVIF_TRANSFORM_IROT;
- avif->irot.angle = irotAngle;
+ image->transformFlags |= AVIF_TRANSFORM_IROT;
+ image->irot.angle = irotAngle;
}
if (imirAxis != 0xff) {
- avif->transformFlags |= AVIF_TRANSFORM_IMIR;
- avif->imir.axis = imirAxis;
+ image->transformFlags |= AVIF_TRANSFORM_IMIR;
+ image->imir.axis = imirAxis;
}
avifBool usingAOM = AVIF_FALSE;
@@ -407,13 +577,14 @@ int main(int argc, char * argv[])
if (codecName && !strcmp(codecName, "aom")) {
usingAOM = AVIF_TRUE;
}
- avifBool hasAlpha = (avif->alphaPlane && avif->alphaRowBytes);
+ avifBool hasAlpha = (image->alphaPlane && image->alphaRowBytes);
avifBool losslessColorQP = (minQuantizer == AVIF_QUANTIZER_LOSSLESS) && (maxQuantizer == AVIF_QUANTIZER_LOSSLESS);
avifBool losslessAlphaQP = (minQuantizerAlpha == AVIF_QUANTIZER_LOSSLESS) && (maxQuantizerAlpha == AVIF_QUANTIZER_LOSSLESS);
- avifBool depthMatches = (sourceDepth == avif->depth);
- avifBool using444 = (avif->yuvFormat == AVIF_PIXEL_FORMAT_YUV444);
- avifBool usingFullRange = (avif->yuvRange == AVIF_RANGE_FULL);
- avifBool usingIdentityMatrix = (avif->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_IDENTITY);
+ avifBool depthMatches = (sourceDepth == image->depth);
+ avifBool using400 = (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400);
+ avifBool using444 = (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444);
+ avifBool usingFullRange = (image->yuvRange == AVIF_RANGE_FULL);
+ avifBool usingIdentityMatrix = (image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_IDENTITY);
// Guess if the enduser is asking for lossless and enable it so that warnings can be emitted
if (!lossless && losslessColorQP && (!hasAlpha || losslessAlphaQP)) {
@@ -447,12 +618,12 @@ int main(int argc, char * argv[])
fprintf(stderr,
"WARNING: [--lossless] Input depth (%d) does not match output depth (%d). Output might not be lossless.\n",
sourceDepth,
- avif->depth);
+ image->depth);
lossless = AVIF_FALSE;
}
if (sourceWasRGB) {
- if (!using444) {
+ if (!using444 && !using400) {
fprintf(stderr, "WARNING: [--lossless] Input data was RGB and YUV subsampling (-y) isn't YUV444. Output might not be lossless.\n");
lossless = AVIF_FALSE;
}
@@ -462,7 +633,7 @@ int main(int argc, char * argv[])
lossless = AVIF_FALSE;
}
- if (!usingIdentityMatrix) {
+ if (!usingIdentityMatrix && !using400) {
fprintf(stderr, "WARNING: [--lossless] Input data was RGB and matrixCoefficients isn't set to identity (--cicp x/x/0); Output might not be lossless.\n");
lossless = AVIF_FALSE;
}
@@ -474,9 +645,9 @@ int main(int argc, char * argv[])
lossyHint = " (Lossless)";
}
printf("AVIF to be written:%s\n", lossyHint);
- avifImageDump(avif);
+ avifImageDump(image);
- printf("Encoding with AV1 codec '%s' speed [%d], color QP [%d (%s) <-> %d (%s)], alpha QP [%d (%s) <-> %d (%s)], %d worker thread(s), please wait...\n",
+ printf("Encoding with AV1 codec '%s' speed [%d], color QP [%d (%s) <-> %d (%s)], alpha QP [%d (%s) <-> %d (%s)], tileRowsLog2 [%d], tileColsLog2 [%d], %d worker thread(s), please wait...\n",
avifCodecName(codecChoice, AVIF_CODEC_FLAG_CAN_ENCODE),
speed,
minQuantizer,
@@ -487,6 +658,8 @@ int main(int argc, char * argv[])
quantizerString(minQuantizerAlpha),
maxQuantizerAlpha,
quantizerString(maxQuantizerAlpha),
+ tileRowsLog2,
+ tileColsLog2,
jobs);
encoder = avifEncoderCreate();
encoder->maxThreads = jobs;
@@ -494,24 +667,108 @@ int main(int argc, char * argv[])
encoder->maxQuantizer = maxQuantizer;
encoder->minQuantizerAlpha = minQuantizerAlpha;
encoder->maxQuantizerAlpha = maxQuantizerAlpha;
+ encoder->tileRowsLog2 = tileRowsLog2;
+ encoder->tileColsLog2 = tileColsLog2;
encoder->codecChoice = codecChoice;
encoder->speed = speed;
- avifResult encodeResult = avifEncoderWrite(encoder, avif, &raw);
- if (encodeResult != AVIF_RESULT_OK) {
- fprintf(stderr, "ERROR: Failed to encode image: %s\n", avifResultToString(encodeResult));
+ encoder->timescale = (uint64_t)timescale;
+ encoder->keyframeInterval = keyframeInterval;
+
+ uint32_t addImageFlags = AVIF_ADD_IMAGE_FLAG_NONE;
+ if (input.filesCount == 1) {
+ addImageFlags |= AVIF_ADD_IMAGE_FLAG_SINGLE;
+ }
+
+ uint32_t firstDurationInTimescales = firstFile->duration;
+ if (input.useStdin || (input.filesCount > 1)) {
+ printf(" * Encoding frame 1 [%u/%d ts]: %s\n", firstDurationInTimescales, timescale, firstFile->filename);
+ }
+ avifResult addImageResult = avifEncoderAddImage(encoder, image, firstDurationInTimescales, addImageFlags);
+ if (addImageResult != AVIF_RESULT_OK) {
+ fprintf(stderr, "ERROR: Failed to encode image: %s\n", avifResultToString(addImageResult));
+ returnCode = 1; goto cleanup;
+ }
+
+ avifInputFile * nextFile;
+ int nextImageIndex = -1;
+ while ((nextFile = avifInputGetNextFile(&input)) != NULL) {
+ ++nextImageIndex;
+
+ printf(" * Encoding frame %d [%u/%d ts]: %s\n", nextImageIndex + 1, nextFile->duration, timescale, nextFile->filename);
+
+ if (nextImage) { avifImageDestroy(nextImage); } nextImage = avifImageCreateEmpty();
+ nextImage->colorPrimaries = image->colorPrimaries;
+ nextImage->transferCharacteristics = image->transferCharacteristics;
+ nextImage->matrixCoefficients = image->matrixCoefficients;
+ nextImage->yuvRange = image->yuvRange;
+
+ avifAppFileFormat nextInputFormat = avifInputReadImage(&input, nextImage, NULL);
+ if (nextInputFormat == AVIF_APP_FILE_FORMAT_UNKNOWN) {
+ returnCode = 1;
+ goto cleanup;
+ }
+
+ // Verify that this frame's properties matches the first frame's properties
+ if ((image->width != nextImage->width) || (image->height != nextImage->height)) {
+ fprintf(stderr,
+ "ERROR: Image sequence dimensions mismatch, [%ux%u] vs [%ux%u]: %s\n",
+ image->width,
+ image->height,
+ nextImage->width,
+ nextImage->height,
+ nextFile->filename);
+ returnCode = 1; goto cleanup;
+ }
+ if (image->depth != nextImage->depth) {
+ fprintf(stderr, "ERROR: Image sequence depth mismatch, [%u] vs [%u]: %s\n", image->depth, nextImage->depth, nextFile->filename);
+ returnCode = 1; goto cleanup;
+ }
+ if ((image->colorPrimaries != nextImage->colorPrimaries) ||
+ (image->transferCharacteristics != nextImage->transferCharacteristics) ||
+ (image->matrixCoefficients != nextImage->matrixCoefficients)) {
+ fprintf(stderr,
+ "ERROR: Image sequence CICP mismatch, [%u/%u/%u] vs [%u/%u/%u]: %s\n",
+ image->colorPrimaries,
+ image->transferCharacteristics,
+ image->matrixCoefficients,
+ nextImage->colorPrimaries,
+ nextImage->transferCharacteristics,
+ nextImage->matrixCoefficients,
+ nextFile->filename);
+ returnCode = 1; goto cleanup;
+ }
+ if (image->yuvRange != nextImage->yuvRange) {
+ fprintf(stderr,
+ "ERROR: Image sequence range mismatch, [%s] vs [%s]: %s\n",
+ (image->yuvRange == AVIF_RANGE_FULL) ? "Full" : "Limited",
+ (nextImage->yuvRange == AVIF_RANGE_FULL) ? "Full" : "Limited",
+ nextFile->filename);
+ returnCode = 1; goto cleanup;
+ }
+
+ avifResult nextImageResult = avifEncoderAddImage(encoder, nextImage, nextFile->duration, AVIF_ADD_IMAGE_FLAG_NONE);
+ if (nextImageResult != AVIF_RESULT_OK) {
+ fprintf(stderr, "ERROR: Failed to encode image: %s\n", avifResultToString(nextImageResult));
+ returnCode = 1; goto cleanup;
+ }
+ }
+
+ avifResult finishResult = avifEncoderFinish(encoder, &raw);
+ if (finishResult != AVIF_RESULT_OK) {
+ fprintf(stderr, "ERROR: Failed to finish encoding: %s\n", avifResultToString(finishResult));
goto cleanup;
}
printf("Encoded successfully.\n");
- printf(" * ColorOBU size: %zu bytes\n", encoder->ioStats.colorOBUSize);
- printf(" * AlphaOBU size: %zu bytes\n", encoder->ioStats.alphaOBUSize);
+ printf(" * Color AV1 total size: " AVIF_FMT_ZU " bytes\n", encoder->ioStats.colorOBUSize);
+ printf(" * Alpha AV1 total size: " AVIF_FMT_ZU " bytes\n", encoder->ioStats.alphaOBUSize);
FILE * f = fopen(outputFilename, "wb");
if (!f) {
fprintf(stderr, "ERROR: Failed to open file for write: %s\n", outputFilename);
goto cleanup;
}
if (fwrite(raw.data, 1, raw.size, f) != raw.size) {
- fprintf(stderr, "Failed to write %zu bytes: %s\n", raw.size, outputFilename);
+ fprintf(stderr, "Failed to write " AVIF_FMT_ZU " bytes: %s\n", raw.size, outputFilename);
returnCode = 1;
} else {
printf("Wrote AVIF: %s\n", outputFilename);
@@ -522,7 +779,13 @@ cleanup:
if (encoder) {
avifEncoderDestroy(encoder);
}
- avifImageDestroy(avif);
+ if (image) {
+ avifImageDestroy(image);
+ }
+ if (nextImage) {
+ avifImageDestroy(nextImage);
+ }
avifRWDataFree(&raw);
+ free((void *)input.files);
return returnCode;
}
diff --git a/chromium/third_party/libavif/src/apps/shared/avifjpeg.c b/chromium/third_party/libavif/src/apps/shared/avifjpeg.c
index 33415baa3cf..4a7349987b9 100644
--- a/chromium/third_party/libavif/src/apps/shared/avifjpeg.c
+++ b/chromium/third_party/libavif/src/apps/shared/avifjpeg.c
@@ -108,7 +108,7 @@ cleanup:
return ret;
}
-avifBool avifJPEGWrite(avifImage * avif, const char * outputFilename, int jpegQuality)
+avifBool avifJPEGWrite(avifImage * avif, const char * outputFilename, int jpegQuality, avifChromaUpsampling chromaUpsampling)
{
avifBool ret = AVIF_FALSE;
FILE * f = NULL;
@@ -119,6 +119,7 @@ avifBool avifJPEGWrite(avifImage * avif, const char * outputFilename, int jpegQu
avifRGBImage rgb;
avifRGBImageSetDefaults(&rgb, avif);
rgb.format = AVIF_RGB_FORMAT_RGB;
+ rgb.chromaUpsampling = chromaUpsampling;
rgb.depth = 8;
avifRGBImageAllocatePixels(&rgb);
avifImageYUVToRGB(avif, &rgb);
diff --git a/chromium/third_party/libavif/src/apps/shared/avifjpeg.h b/chromium/third_party/libavif/src/apps/shared/avifjpeg.h
index 3824aea146c..4eefa904bd9 100644
--- a/chromium/third_party/libavif/src/apps/shared/avifjpeg.h
+++ b/chromium/third_party/libavif/src/apps/shared/avifjpeg.h
@@ -7,6 +7,6 @@
#include "avif/avif.h"
avifBool avifJPEGRead(avifImage * avif, const char * inputFilename, avifPixelFormat requestedFormat, uint32_t requestedDepth);
-avifBool avifJPEGWrite(avifImage * avif, const char * outputFilename, int jpegQuality);
+avifBool avifJPEGWrite(avifImage * avif, const char * outputFilename, int jpegQuality, avifChromaUpsampling chromaUpsampling);
#endif // ifndef LIBAVIF_APPS_SHARED_AVIFJPEG_H
diff --git a/chromium/third_party/libavif/src/apps/shared/avifpng.c b/chromium/third_party/libavif/src/apps/shared/avifpng.c
index 1d6c5d47381..5ebf9239a9b 100644
--- a/chromium/third_party/libavif/src/apps/shared/avifpng.c
+++ b/chromium/third_party/libavif/src/apps/shared/avifpng.c
@@ -152,7 +152,7 @@ cleanup:
return readResult;
}
-avifBool avifPNGWrite(avifImage * avif, const char * outputFilename, uint32_t requestedDepth)
+avifBool avifPNGWrite(avifImage * avif, const char * outputFilename, uint32_t requestedDepth, avifChromaUpsampling chromaUpsampling)
{
volatile avifBool writeResult = AVIF_FALSE;
png_structp png = NULL;
@@ -204,6 +204,7 @@ avifBool avifPNGWrite(avifImage * avif, const char * outputFilename, uint32_t re
avifRGBImageSetDefaults(&rgb, avif);
rgb.depth = rgbDepth;
+ rgb.chromaUpsampling = chromaUpsampling;
avifRGBImageAllocatePixels(&rgb);
avifImageYUVToRGB(avif, &rgb);
rowPointers = (png_bytep *)malloc(sizeof(png_bytep) * rgb.height);
diff --git a/chromium/third_party/libavif/src/apps/shared/avifpng.h b/chromium/third_party/libavif/src/apps/shared/avifpng.h
index 12b68ad3e24..38f87e0ad2f 100644
--- a/chromium/third_party/libavif/src/apps/shared/avifpng.h
+++ b/chromium/third_party/libavif/src/apps/shared/avifpng.h
@@ -8,6 +8,6 @@
// if (requestedDepth == 0), do best-fit
avifBool avifPNGRead(avifImage * avif, const char * inputFilename, avifPixelFormat requestedFormat, uint32_t requestedDepth, uint32_t * outPNGDepth);
-avifBool avifPNGWrite(avifImage * avif, const char * outputFilename, uint32_t requestedDepth);
+avifBool avifPNGWrite(avifImage * avif, const char * outputFilename, uint32_t requestedDepth, avifChromaUpsampling chromaUpsampling);
#endif // ifndef LIBAVIF_APPS_SHARED_AVIFPNG_H
diff --git a/chromium/third_party/libavif/src/apps/shared/avifutil.c b/chromium/third_party/libavif/src/apps/shared/avifutil.c
index 23bf06fd7f0..1341728673d 100644
--- a/chromium/third_party/libavif/src/apps/shared/avifutil.c
+++ b/chromium/third_party/libavif/src/apps/shared/avifutil.c
@@ -19,9 +19,9 @@ void avifImageDump(avifImage * avif)
printf(" * Transfer Char. : %d\n", avif->transferCharacteristics);
printf(" * Matrix Coeffs. : %d\n", avif->matrixCoefficients);
- printf(" * ICC Profile : %s (%zu bytes)\n", (avif->icc.size > 0) ? "Present" : "Absent", avif->icc.size);
- printf(" * XMP Metadata : %s (%zu bytes)\n", (avif->xmp.size > 0) ? "Present" : "Absent", avif->xmp.size);
- printf(" * EXIF Metadata : %s (%zu bytes)\n", (avif->exif.size > 0) ? "Present" : "Absent", avif->exif.size);
+ printf(" * ICC Profile : %s (" AVIF_FMT_ZU " bytes)\n", (avif->icc.size > 0) ? "Present" : "Absent", avif->icc.size);
+ printf(" * XMP Metadata : %s (" AVIF_FMT_ZU " bytes)\n", (avif->xmp.size > 0) ? "Present" : "Absent", avif->xmp.size);
+ printf(" * EXIF Metadata : %s (" AVIF_FMT_ZU " bytes)\n", (avif->exif.size > 0) ? "Present" : "Absent", avif->exif.size);
if (avif->transformFlags == AVIF_TRANSFORM_NONE) {
printf(" * Transformations: None\n");
diff --git a/chromium/third_party/libavif/src/apps/shared/avifutil.h b/chromium/third_party/libavif/src/apps/shared/avifutil.h
index 83f08ee58cf..5db081b0fbc 100644
--- a/chromium/third_party/libavif/src/apps/shared/avifutil.h
+++ b/chromium/third_party/libavif/src/apps/shared/avifutil.h
@@ -6,6 +6,20 @@
#include "avif/avif.h"
+/*
+ * The %z format specifier is not available in Visual Studio versions before
+ * 2013, nor in mingw-w64 toolchains with `__USE_MINGW_ANSI_STDIO` not set to 1.
+ * Hence the %I format specifier must be used instead to print out `size_t`.
+ * Newer Visual Studio versions, and mingw-w64 toolchains built with the commit
+ * below with C99 set as the standard, support the %z specifier properly.
+ * Related mingw-w64 commit: bfd33f6c0ec5e652cc9911857dd1492ece8d8383
+ */
+#if (defined(_MSC_VER) && _MSC_VER < 1800) || (defined(__USE_MINGW_ANSI_STDIO) && __USE_MINGW_ANSI_STDIO == 0)
+# define AVIF_FMT_ZU "%Iu"
+#else
+# define AVIF_FMT_ZU "%zu"
+#endif
+
void avifImageDump(avifImage * avif);
void avifPrintVersions(void);
diff --git a/chromium/third_party/libavif/src/apps/shared/y4m.c b/chromium/third_party/libavif/src/apps/shared/y4m.c
index 72e97a23602..9e6b39a1cea 100644
--- a/chromium/third_party/libavif/src/apps/shared/y4m.c
+++ b/chromium/third_party/libavif/src/apps/shared/y4m.c
@@ -10,6 +10,21 @@
#include <stdlib.h>
#include <string.h>
+#define Y4M_MAX_LINE_SIZE 2048 // Arbitrary limit. Y4M headers should be much smaller than this
+
+struct y4mFrameIterator
+{
+ int width;
+ int height;
+ int depth;
+ avifBool hasAlpha;
+ avifPixelFormat format;
+ avifRange range;
+
+ FILE * inputFile;
+ const char * displayFilename;
+};
+
static avifBool y4mColorSpaceParse(const char * formatString, avifPixelFormat * format, int * depth, avifBool * hasAlpha)
{
*hasAlpha = AVIF_FALSE;
@@ -80,6 +95,21 @@ static avifBool y4mColorSpaceParse(const char * formatString, avifPixelFormat *
*depth = 8;
return AVIF_TRUE;
}
+ if (!strcmp(formatString, "Cmono")) {
+ *format = AVIF_PIXEL_FORMAT_YUV400;
+ *depth = 8;
+ return AVIF_TRUE;
+ }
+ if (!strcmp(formatString, "Cmono10")) {
+ *format = AVIF_PIXEL_FORMAT_YUV400;
+ *depth = 10;
+ return AVIF_TRUE;
+ }
+ if (!strcmp(formatString, "Cmono12")) {
+ *format = AVIF_PIXEL_FORMAT_YUV400;
+ *depth = 12;
+ return AVIF_TRUE;
+ }
return AVIF_FALSE;
}
@@ -102,6 +132,31 @@ static avifBool getHeaderString(uint8_t * p, uint8_t * end, char * out, size_t m
return AVIF_TRUE;
}
+static int y4mReadLine(FILE * inputFile, avifRWData * raw, const char * displayFilename)
+{
+    static const int maxBytes = Y4M_MAX_LINE_SIZE;
+    int bytesRead = 0;
+    uint8_t * front = raw->data;
+
+    for (;;) {
+        if (fread(front, 1, 1, inputFile) != 1) {
+            fprintf(stderr, "Failed to read line: %s\n", displayFilename);
+            break;
+        }
+
+        ++bytesRead;
+        if (*front == '\n') { // check newline before the size limit so a line of exactly maxBytes is accepted
+            return bytesRead;
+        }
+
+        if (bytesRead >= maxBytes) {
+            break;
+        }
+        ++front;
+    }
+    return -1;
+}
+
#define ADVANCE(BYTES) \
do { \
p += BYTES; \
@@ -109,136 +164,136 @@ static avifBool getHeaderString(uint8_t * p, uint8_t * end, char * out, size_t m
goto cleanup; \
} while (0)
-avifBool y4mRead(avifImage * avif, const char * inputFilename)
+avifBool y4mRead(avifImage * avif, const char * inputFilename, struct y4mFrameIterator ** iter)
{
- FILE * inputFile = fopen(inputFilename, "rb");
- if (!inputFile) {
- fprintf(stderr, "Cannot open file for read: %s\n", inputFilename);
- return AVIF_FALSE;
- }
- fseek(inputFile, 0, SEEK_END);
- size_t inputFileSize = ftell(inputFile);
- fseek(inputFile, 0, SEEK_SET);
+ avifBool result = AVIF_FALSE;
- if (inputFileSize < 10) {
- fprintf(stderr, "File too small: %s\n", inputFilename);
- fclose(inputFile);
- return AVIF_FALSE;
- }
+ struct y4mFrameIterator frame;
+ frame.width = -1;
+ frame.height = -1;
+ frame.depth = -1;
+ frame.hasAlpha = AVIF_FALSE;
+ frame.format = AVIF_PIXEL_FORMAT_NONE;
+ frame.range = AVIF_RANGE_LIMITED;
+ frame.inputFile = NULL;
+ frame.displayFilename = inputFilename;
avifRWData raw = AVIF_DATA_EMPTY;
- avifRWDataRealloc(&raw, inputFileSize);
- if (fread(raw.data, 1, inputFileSize, inputFile) != inputFileSize) {
- fprintf(stderr, "Failed to read %zu bytes: %s\n", inputFileSize, inputFilename);
- fclose(inputFile);
- avifRWDataFree(&raw);
- return AVIF_FALSE;
- }
+ avifRWDataRealloc(&raw, Y4M_MAX_LINE_SIZE);
- avifBool result = AVIF_FALSE;
+ if (iter && *iter) {
+ // Continue reading FRAMEs from this y4m stream
+ memcpy(&frame, *iter, sizeof(struct y4mFrameIterator));
+ } else {
+ // Open a fresh y4m and read its header
- fclose(inputFile);
- inputFile = NULL;
+ if (inputFilename) {
+ frame.inputFile = fopen(inputFilename, "rb");
+ if (!frame.inputFile) {
+ fprintf(stderr, "Cannot open file for read: %s\n", inputFilename);
+ goto cleanup;
+ }
+ } else {
+ frame.inputFile = stdin;
+ frame.displayFilename = "(stdin)";
+ }
- uint8_t * end = raw.data + raw.size;
- uint8_t * p = raw.data;
+ int headerBytes = y4mReadLine(frame.inputFile, &raw, frame.displayFilename);
+ if (headerBytes < 0) {
+ fprintf(stderr, "Y4M header too large: %s\n", frame.displayFilename);
+ goto cleanup;
+ }
+ if (headerBytes < 10) {
+ fprintf(stderr, "Y4M header too small: %s\n", frame.displayFilename);
+ goto cleanup;
+ }
- if (memcmp(p, "YUV4MPEG2 ", 10) != 0) {
- fprintf(stderr, "Not a y4m file: %s\n", inputFilename);
- avifRWDataFree(&raw);
- return AVIF_FALSE;
- }
- ADVANCE(10); // skip past header
+ uint8_t * end = raw.data + headerBytes;
+ uint8_t * p = raw.data;
- char tmpBuffer[32];
+ if (memcmp(p, "YUV4MPEG2 ", 10) != 0) {
+ fprintf(stderr, "Not a y4m file: %s\n", frame.displayFilename);
+ goto cleanup;
+ }
+ ADVANCE(10); // skip past header
- int width = -1;
- int height = -1;
- int depth = -1;
- avifBool hasAlpha = AVIF_FALSE;
- avifPixelFormat format = AVIF_PIXEL_FORMAT_NONE;
- avifRange range = AVIF_RANGE_LIMITED;
- while (p != end) {
- switch (*p) {
- case 'W': // width
- width = atoi((const char *)p + 1);
- break;
- case 'H': // height
- height = atoi((const char *)p + 1);
- break;
- case 'C': // color space
- if (!getHeaderString(p, end, tmpBuffer, 31)) {
- fprintf(stderr, "Bad y4m header: %s\n", inputFilename);
- goto cleanup;
- }
- if (!y4mColorSpaceParse(tmpBuffer, &format, &depth, &hasAlpha)) {
- fprintf(stderr, "Unsupported y4m pixel format: %s\n", inputFilename);
- goto cleanup;
- }
- break;
- case 'X':
- if (!getHeaderString(p, end, tmpBuffer, 31)) {
- fprintf(stderr, "Bad y4m header: %s\n", inputFilename);
- goto cleanup;
- }
- if (!strcmp(tmpBuffer, "XCOLORRANGE=FULL")) {
- range = AVIF_RANGE_FULL;
- }
- break;
- default:
+ char tmpBuffer[32];
+
+ while (p != end) {
+ switch (*p) {
+ case 'W': // width
+ frame.width = atoi((const char *)p + 1);
+ break;
+ case 'H': // height
+ frame.height = atoi((const char *)p + 1);
+ break;
+ case 'C': // color space
+ if (!getHeaderString(p, end, tmpBuffer, 31)) {
+ fprintf(stderr, "Bad y4m header: %s\n", frame.displayFilename);
+ goto cleanup;
+ }
+ if (!y4mColorSpaceParse(tmpBuffer, &frame.format, &frame.depth, &frame.hasAlpha)) {
+ fprintf(stderr, "Unsupported y4m pixel format: %s\n", frame.displayFilename);
+ goto cleanup;
+ }
+ break;
+ case 'X':
+ if (!getHeaderString(p, end, tmpBuffer, 31)) {
+ fprintf(stderr, "Bad y4m header: %s\n", frame.displayFilename);
+ goto cleanup;
+ }
+ if (!strcmp(tmpBuffer, "XCOLORRANGE=FULL")) {
+ frame.range = AVIF_RANGE_FULL;
+ }
+ break;
+ default:
+ break;
+ }
+
+ // Advance past header section
+ while ((*p != '\n') && (*p != ' ')) {
+ ADVANCE(1);
+ }
+ if (*p == '\n') {
+ // Done with y4m header
break;
- }
+ }
- // Advance past header section
- while ((*p != '\n') && (*p != ' ')) {
ADVANCE(1);
}
- if (*p == '\n') {
- // Done with y4m header
- break;
- }
- ADVANCE(1);
- }
-
- if (*p != '\n') {
- fprintf(stderr, "Truncated y4m header (no newline): %s\n", inputFilename);
- goto cleanup;
+ if (*p != '\n') {
+ fprintf(stderr, "Truncated y4m header (no newline): %s\n", frame.displayFilename);
+ goto cleanup;
+ }
}
- ADVANCE(1); // advance past newline
- size_t remainingBytes = end - p;
- if (remainingBytes < 6) {
- fprintf(stderr, "Truncated y4m (no room for frame header): %s\n", inputFilename);
+ int frameHeaderBytes = y4mReadLine(frame.inputFile, &raw, frame.displayFilename);
+ if (frameHeaderBytes < 0) {
+ fprintf(stderr, "Y4M frame header too large: %s\n", frame.displayFilename);
goto cleanup;
}
- if (memcmp(p, "FRAME", 5) != 0) {
- fprintf(stderr, "Truncated y4m (no frame): %s\n", inputFilename);
+ if (frameHeaderBytes < 6) {
+ fprintf(stderr, "Y4M frame header too small: %s\n", frame.displayFilename);
goto cleanup;
}
-
- // Advance past frame header
- // TODO: Parse frame overrides similarly to header parsing above?
- while (*p != '\n') {
- ADVANCE(1);
- }
- if (*p != '\n') {
- fprintf(stderr, "Invalid y4m frame header: %s\n", inputFilename);
+ if (memcmp(raw.data, "FRAME", 5) != 0) {
+ fprintf(stderr, "Truncated y4m (no frame): %s\n", frame.displayFilename);
goto cleanup;
}
- ADVANCE(1); // advance past newline
- if ((width < 1) || (height < 1) || ((depth != 8) && (depth != 10) && (depth != 12)) || (format == AVIF_PIXEL_FORMAT_NONE)) {
- fprintf(stderr, "Failed to parse y4m header (not enough information): %s\n", inputFilename);
+ if ((frame.width < 1) || (frame.height < 1) || ((frame.depth != 8) && (frame.depth != 10) && (frame.depth != 12)) ||
+ (frame.format == AVIF_PIXEL_FORMAT_NONE)) {
+ fprintf(stderr, "Failed to parse y4m header (not enough information): %s\n", frame.displayFilename);
goto cleanup;
}
avifImageFreePlanes(avif, AVIF_PLANES_YUV | AVIF_PLANES_A);
- avif->width = width;
- avif->height = height;
- avif->depth = depth;
- avif->yuvFormat = format;
- avif->yuvRange = range;
+ avif->width = frame.width;
+ avif->height = frame.height;
+ avif->depth = frame.depth;
+ avif->yuvFormat = frame.format;
+ avif->yuvRange = frame.range;
avifImageAllocatePlanes(avif, AVIF_PLANES_YUV);
avifPixelFormatInfo info;
@@ -246,39 +301,57 @@ avifBool y4mRead(avifImage * avif, const char * inputFilename)
uint32_t planeBytes[4];
planeBytes[0] = avif->yuvRowBytes[0] * avif->height;
- planeBytes[1] = avif->yuvRowBytes[1] * (avif->height >> info.chromaShiftY);
- planeBytes[2] = avif->yuvRowBytes[2] * (avif->height >> info.chromaShiftY);
- if (hasAlpha) {
+ planeBytes[1] = avif->yuvRowBytes[1] * ((avif->height + info.chromaShiftY) >> info.chromaShiftY);
+ planeBytes[2] = avif->yuvRowBytes[2] * ((avif->height + info.chromaShiftY) >> info.chromaShiftY);
+ if (frame.hasAlpha) {
planeBytes[3] = avif->alphaRowBytes * avif->height;
} else {
planeBytes[3] = 0;
}
- uint32_t bytesNeeded = planeBytes[0] + planeBytes[1] + planeBytes[2] + planeBytes[3];
- remainingBytes = end - p;
- if (bytesNeeded > remainingBytes) {
- fprintf(stderr, "Not enough bytes in y4m for first frame: %s\n", inputFilename);
- goto cleanup;
- }
-
for (int i = 0; i < 3; ++i) {
- memcpy(avif->yuvPlanes[i], p, planeBytes[i]);
- p += planeBytes[i];
+ uint32_t bytesRead = (uint32_t)fread(avif->yuvPlanes[i], 1, planeBytes[i], frame.inputFile);
+ if (bytesRead != planeBytes[i]) {
+ fprintf(stderr, "Failed to read y4m plane (not enough data, wanted %d, got %d): %s\n", planeBytes[i], bytesRead, frame.displayFilename);
+ goto cleanup;
+ }
}
- if (hasAlpha) {
+ if (frame.hasAlpha) {
avifImageAllocatePlanes(avif, AVIF_PLANES_A);
- memcpy(avif->alphaPlane, p, planeBytes[3]);
+ if (fread(avif->alphaPlane, 1, planeBytes[3], frame.inputFile) != planeBytes[3]) {
+ fprintf(stderr, "Failed to read y4m plane (not enough data): %s\n", frame.displayFilename);
+ goto cleanup;
+ }
}
result = AVIF_TRUE;
cleanup:
+ if (iter) {
+ if (*iter) {
+ free(*iter);
+ *iter = NULL;
+ }
+
+ if (result && frame.inputFile) {
+ ungetc(fgetc(frame.inputFile), frame.inputFile); // Kick frame.inputFile to force EOF
+
+ if (!feof(frame.inputFile)) {
+ // Remember y4m state for next time
+ *iter = malloc(sizeof(struct y4mFrameIterator));
+ memcpy(*iter, &frame, sizeof(struct y4mFrameIterator));
+ }
+ }
+ }
+
+ if (inputFilename && frame.inputFile && (!iter || !(*iter))) {
+ fclose(frame.inputFile);
+ }
avifRWDataFree(&raw);
return result;
}
avifBool y4mWrite(avifImage * avif, const char * outputFilename)
{
- avifBool swapUV = AVIF_FALSE;
avifBool hasAlpha = (avif->alphaPlane != NULL) && (avif->alphaRowBytes > 0);
avifBool writeAlpha = AVIF_FALSE;
char * y4mHeaderFormat = NULL;
@@ -304,9 +377,8 @@ avifBool y4mWrite(avifImage * avif, const char * outputFilename)
case AVIF_PIXEL_FORMAT_YUV420:
y4mHeaderFormat = "C420jpeg XYSCSS=420JPEG";
break;
- case AVIF_PIXEL_FORMAT_YV12:
- y4mHeaderFormat = "C420jpeg XYSCSS=420JPEG";
- swapUV = AVIF_TRUE;
+ case AVIF_PIXEL_FORMAT_YUV400:
+ y4mHeaderFormat = "Cmono XYSCSS=400";
break;
case AVIF_PIXEL_FORMAT_NONE:
// will error later; this case is here for warning's sake
@@ -324,9 +396,8 @@ avifBool y4mWrite(avifImage * avif, const char * outputFilename)
case AVIF_PIXEL_FORMAT_YUV420:
y4mHeaderFormat = "C420p10 XYSCSS=420P10";
break;
- case AVIF_PIXEL_FORMAT_YV12:
- y4mHeaderFormat = "C422p10 XYSCSS=422P10";
- swapUV = AVIF_TRUE;
+ case AVIF_PIXEL_FORMAT_YUV400:
+ y4mHeaderFormat = "Cmono10 XYSCSS=400";
break;
case AVIF_PIXEL_FORMAT_NONE:
// will error later; this case is here for warning's sake
@@ -344,9 +415,8 @@ avifBool y4mWrite(avifImage * avif, const char * outputFilename)
case AVIF_PIXEL_FORMAT_YUV420:
y4mHeaderFormat = "C420p12 XYSCSS=420P12";
break;
- case AVIF_PIXEL_FORMAT_YV12:
- y4mHeaderFormat = "C422p12 XYSCSS=422P12";
- swapUV = AVIF_TRUE;
+ case AVIF_PIXEL_FORMAT_YUV400:
+ y4mHeaderFormat = "Cmono12 XYSCSS=400";
break;
case AVIF_PIXEL_FORMAT_NONE:
// will error later; this case is here for warning's sake
@@ -392,16 +462,6 @@ avifBool y4mWrite(avifImage * avif, const char * outputFilename)
planeBytes[0] = avif->yuvRowBytes[0] * avif->height;
planeBytes[1] = avif->yuvRowBytes[1] * (avif->height >> info.chromaShiftY);
planeBytes[2] = avif->yuvRowBytes[2] * (avif->height >> info.chromaShiftY);
- if (swapUV) {
- uint8_t * tmpPtr;
- uint32_t tmp;
- tmpPtr = planes[1];
- tmp = planeBytes[1];
- planes[1] = planes[2];
- planeBytes[1] = planeBytes[2];
- planes[2] = tmpPtr;
- planeBytes[2] = tmp;
- }
for (int i = 0; i < 3; ++i) {
if (fwrite(planes[i], 1, planeBytes[i], f) != planeBytes[i]) {
diff --git a/chromium/third_party/libavif/src/apps/shared/y4m.h b/chromium/third_party/libavif/src/apps/shared/y4m.h
index 851663b9732..45ee109df55 100644
--- a/chromium/third_party/libavif/src/apps/shared/y4m.h
+++ b/chromium/third_party/libavif/src/apps/shared/y4m.h
@@ -6,7 +6,13 @@
#include "avif/avif.h"
-avifBool y4mRead(avifImage * avif, const char * inputFilename);
+// Optionally pass one of these pointers (set to NULL) on a fresh input. If it successfully reads in
+// a frame and sees that there is more data to be read, it will allocate an internal structure remembering
+// the y4m header and FILE position and return it. Pass in this pointer to continue reading frames.
+// The structure will always be freed upon failure or reaching EOF.
+struct y4mFrameIterator;
+
+avifBool y4mRead(avifImage * avif, const char * inputFilename, struct y4mFrameIterator ** iter);
avifBool y4mWrite(avifImage * avif, const char * outputFilename);
#endif // ifndef LIBAVIF_APPS_SHARED_Y4M_H
diff --git a/chromium/third_party/libavif/src/contrib/CMakeLists.txt b/chromium/third_party/libavif/src/contrib/CMakeLists.txt
new file mode 100644
index 00000000000..602e8aa3484
--- /dev/null
+++ b/chromium/third_party/libavif/src/contrib/CMakeLists.txt
@@ -0,0 +1,4 @@
+# Copyright 2020 Joe Drago. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause
+
+add_subdirectory(gdk-pixbuf)
diff --git a/chromium/third_party/libavif/src/contrib/README.md b/chromium/third_party/libavif/src/contrib/README.md
new file mode 100644
index 00000000000..fdb9851df78
--- /dev/null
+++ b/chromium/third_party/libavif/src/contrib/README.md
@@ -0,0 +1,9 @@
+# libavif external contributions
+
+Anything in this directory was contributed by an external author and is not officially maintained by
+libavif directly, nor are there any expectations of continued functionality. Any CMake options
+offered in libavif's CMakeLists that cite files in this subdirectory are guaranteed to be disabled
+by default.
+
+See libavif's `LICENSE` file to learn about any additional license requirements/information for any
+subdirectories in here.
diff --git a/chromium/third_party/libavif/src/contrib/gdk-pixbuf/CMakeLists.txt b/chromium/third_party/libavif/src/contrib/gdk-pixbuf/CMakeLists.txt
new file mode 100644
index 00000000000..22eb7c7fa2b
--- /dev/null
+++ b/chromium/third_party/libavif/src/contrib/gdk-pixbuf/CMakeLists.txt
@@ -0,0 +1,28 @@
+# Copyright 2020 Emmanuel Gil Peyrot. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause
+
+option(AVIF_BUILD_GDK_PIXBUF "Build a gdk-pixbuf loader" OFF)
+if(AVIF_BUILD_GDK_PIXBUF)
+ find_package(PkgConfig)
+ if(PKG_CONFIG_FOUND)
+ pkg_search_module(GDK_PIXBUF gdk-pixbuf-2.0)
+ if(GDK_PIXBUF_FOUND)
+ set(GDK_PIXBUF_SRCS
+ loader.c
+ )
+ add_library(pixbufloader-avif ${GDK_PIXBUF_SRCS})
+
+ # This is required because glib stupidly uses invalid #define names, such as __G_LIB_H__…
+ add_definitions(-Wno-reserved-id-macro)
+ target_link_libraries(pixbufloader-avif PUBLIC ${GDK_PIXBUF_LIBRARIES} avif)
+ target_include_directories(pixbufloader-avif PUBLIC ${GDK_PIXBUF_INCLUDE_DIRS})
+
+ pkg_get_variable(GDK_PIXBUF_MODULEDIR gdk-pixbuf-2.0 gdk_pixbuf_moduledir)
+ install(TARGETS pixbufloader-avif DESTINATION ${GDK_PIXBUF_MODULEDIR})
+ else()
+ message(WARNING "gdk-pixbuf loader: disabled due to missing gdk-pixbuf-2.0")
+ endif()
+ else()
+ message(WARNING "gdk-pixbuf loader: disabled due to missing pkg-config")
+ endif()
+endif()
diff --git a/chromium/third_party/libavif/src/contrib/gdk-pixbuf/loader.c b/chromium/third_party/libavif/src/contrib/gdk-pixbuf/loader.c
new file mode 100644
index 00000000000..af0e77599d1
--- /dev/null
+++ b/chromium/third_party/libavif/src/contrib/gdk-pixbuf/loader.c
@@ -0,0 +1,395 @@
+/* Copyright 2020 Emmanuel Gil Peyrot. All rights reserved.
+ SPDX-License-Identifier: BSD-2-Clause
+*/
+
+#include <avif/avif.h>
+
+#define GDK_PIXBUF_ENABLE_BACKEND
+#include <gdk-pixbuf/gdk-pixbuf-io.h>
+
+G_MODULE_EXPORT void fill_vtable (GdkPixbufModule * module);
+G_MODULE_EXPORT void fill_info (GdkPixbufFormat * info);
+
+struct avif_context {
+ GdkPixbuf * pixbuf;
+
+ GdkPixbufModuleSizeFunc size_func;
+ GdkPixbufModuleUpdatedFunc updated_func;
+ GdkPixbufModulePreparedFunc prepared_func;
+ gpointer user_data;
+
+ avifDecoder * decoder;
+ GByteArray * data;
+ GBytes * bytes;
+};
+
+static void avif_context_free(struct avif_context * context)
+{
+    if (!context)
+        return;
+
+    if (context->decoder) {
+        avifDecoderDestroy(context->decoder);
+        context->decoder = NULL;
+    }
+
+    if (context->data) {
+        g_byte_array_unref(context->data);
+        context->data = NULL;
+    }
+
+    if (context->bytes) {
+        g_bytes_unref(context->bytes);
+        context->bytes = NULL;
+    }
+
+    if (context->pixbuf) {
+        g_object_unref(context->pixbuf);
+        context->pixbuf = NULL;
+    }
+
+    g_free(context);
+}
+
+static gboolean avif_context_try_load(struct avif_context * context, GError ** error)
+{
+ avifResult ret;
+ avifDecoder * decoder = context->decoder;
+ avifImage * image;
+ avifRGBImage rgb;
+ avifROData raw;
+ int width, height;
+
+ raw.data = g_bytes_get_data(context->bytes, &raw.size);
+
+ ret = avifDecoderParse(decoder, &raw);
+ if (ret != AVIF_RESULT_OK) {
+ g_set_error(error, GDK_PIXBUF_ERROR, GDK_PIXBUF_ERROR_CORRUPT_IMAGE,
+ "Couldn’t decode image: %s", avifResultToString(ret));
+ return FALSE;
+ }
+
+ if (decoder->imageCount > 1) {
+ g_set_error_literal(error, GDK_PIXBUF_ERROR, GDK_PIXBUF_ERROR_FAILED,
+ "Image sequences not yet implemented");
+ return FALSE;
+ }
+
+ ret = avifDecoderNextImage(decoder);
+ if (ret == AVIF_RESULT_NO_IMAGES_REMAINING) {
+ /* No more images, bail out. Verify that you got the expected amount of images decoded. */
+ return TRUE;
+ } else if (ret != AVIF_RESULT_OK) {
+ g_set_error(error, GDK_PIXBUF_ERROR, GDK_PIXBUF_ERROR_FAILED,
+ "Failed to decode all frames: %s", avifResultToString(ret));
+ return FALSE;
+ }
+
+ image = decoder->image;
+ width = image->width;
+ height = image->height;
+
+ if (context->size_func) {
+ (*context->size_func)(&width, &height, context->user_data);
+ }
+
+ if (width == 0 || height == 0) {
+ g_set_error_literal(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_CORRUPT_IMAGE,
+ "Transformed AVIF has zero width or height");
+ return FALSE;
+ }
+
+ if (!context->pixbuf) {
+ int bits_per_sample = 8;
+
+ context->pixbuf = gdk_pixbuf_new(GDK_COLORSPACE_RGB,
+ !!image->alphaPlane, bits_per_sample,
+ image->width, image->height);
+ if (context->pixbuf == NULL) {
+ g_set_error_literal(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_INSUFFICIENT_MEMORY,
+ "Insufficient memory to open AVIF file");
+ return FALSE;
+ }
+ context->prepared_func(context->pixbuf, NULL, context->user_data);
+ }
+
+ avifRGBImageSetDefaults(&rgb, image);
+ rgb.depth = 8;
+ rgb.format = image->alphaPlane ? AVIF_RGB_FORMAT_RGBA : AVIF_RGB_FORMAT_RGB;
+ rgb.pixels = gdk_pixbuf_get_pixels(context->pixbuf);
+ rgb.rowBytes = gdk_pixbuf_get_rowstride(context->pixbuf);
+
+ ret = avifImageYUVToRGB(image, &rgb);
+ if (ret != AVIF_RESULT_OK) {
+ g_set_error(error, GDK_PIXBUF_ERROR, GDK_PIXBUF_ERROR_FAILED,
+ "Failed to convert YUV to RGB: %s", avifResultToString(ret));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static gpointer begin_load(GdkPixbufModuleSizeFunc size_func,
+ GdkPixbufModulePreparedFunc prepared_func,
+ GdkPixbufModuleUpdatedFunc updated_func,
+ gpointer user_data, GError ** error)
+{
+ struct avif_context * context;
+ avifDecoder * decoder;
+
+ g_assert(prepared_func != NULL);
+
+ decoder = avifDecoderCreate();
+ if (!decoder) {
+ g_set_error_literal(error, GDK_PIXBUF_ERROR, GDK_PIXBUF_ERROR_INSUFFICIENT_MEMORY,
+ "Couldn’t allocate memory for decoder");
+ return NULL;
+ }
+
+ context = g_new0(struct avif_context, 1);
+ if (!context)
+ return NULL;
+
+ context->size_func = size_func;
+ context->updated_func = updated_func;
+ context->prepared_func = prepared_func;
+ context->user_data = user_data;
+
+ context->decoder = decoder;
+ context->data = g_byte_array_sized_new(40000);
+
+ return context;
+}
+
+static gboolean stop_load(gpointer data, GError ** error)
+{
+ struct avif_context * context = (struct avif_context *) data;
+ gboolean ret;
+
+ context->bytes = g_byte_array_free_to_bytes(context->data);
+ context->data = NULL;
+ ret = avif_context_try_load(context, error);
+
+ avif_context_free(context);
+
+ return ret;
+}
+
+static gboolean load_increment(gpointer data, const guchar * buf, guint size, GError ** error)
+{
+ struct avif_context * context = (struct avif_context *) data;
+ g_byte_array_append(context->data, buf, size);
+ if (error)
+ *error = NULL;
+ return TRUE;
+}
+
+static gboolean avif_is_save_option_supported (const gchar *option_key)
+{
+ if (g_strcmp0(option_key, "quality") == 0) {
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
+static gboolean avif_image_saver(FILE *f,
+ GdkPixbuf *pixbuf,
+ gchar **keys,
+ gchar **values,
+ GError **error)
+{
+ int width, height, min_quantizer, max_quantizer, alpha_quantizer;
+ long quality = 52; /* default; must be between 0 and 100 */
+ gboolean save_alpha;
+ avifImage *avif;
+ avifRGBImage rgb;
+ avifResult res;
+ avifRWData raw = AVIF_DATA_EMPTY;
+ avifEncoder *encoder;
+ guint maxThreads;
+
+ if (f == NULL || pixbuf == NULL) {
+ return FALSE;
+ }
+
+ if (keys && *keys) {
+ gchar **kiter = keys;
+ gchar **viter = values;
+
+ while (*kiter) {
+ if (strcmp(*kiter, "quality") == 0) {
+ char *endptr = NULL;
+ quality = strtol(*viter, &endptr, 10);
+
+ if (endptr == *viter) {
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_BAD_OPTION,
+ "AVIF quality must be a value between 0 and 100; value “%s” could not be parsed.",
+ *viter);
+
+ return FALSE;
+ }
+
+ if (quality < 0 || quality > 100) {
+
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_BAD_OPTION,
+ "AVIF quality must be a value between 0 and 100; value “%d” is not allowed.",
+ (int)quality);
+
+ return FALSE;
+ }
+ } else {
+ g_warning("Unrecognized parameter (%s) passed to AVIF saver.", *kiter);
+ }
+
+ ++kiter;
+ ++viter;
+ }
+ }
+
+ if (gdk_pixbuf_get_bits_per_sample(pixbuf) != 8) {
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_UNKNOWN_TYPE,
+ "Sorry, only 8bit images are supported by this AVIF saver");
+ return FALSE;
+ }
+
+ width = gdk_pixbuf_get_width(pixbuf);
+ height = gdk_pixbuf_get_height(pixbuf);
+
+ if ( width == 0 || height == 0) {
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_CORRUPT_IMAGE,
+ "Empty image, nothing to save");
+ return FALSE;
+ }
+
+ save_alpha = gdk_pixbuf_get_has_alpha(pixbuf);
+
+ if (save_alpha) {
+ if ( gdk_pixbuf_get_n_channels(pixbuf) != 4) {
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_UNKNOWN_TYPE,
+ "Unsupported number of channels");
+ return FALSE;
+ }
+ }
+ else {
+ if ( gdk_pixbuf_get_n_channels(pixbuf) != 3) {
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_UNKNOWN_TYPE,
+ "Unsupported number of channels");
+ return FALSE;
+ }
+ }
+
+ max_quantizer = AVIF_QUANTIZER_WORST_QUALITY * ( 100 - CLAMP(quality, 0, 100)) / 100;
+ min_quantizer = 0;
+ alpha_quantizer = 0;
+
+ if ( max_quantizer > 20 ) {
+ min_quantizer = max_quantizer - 20;
+
+ if (max_quantizer > 40) {
+ alpha_quantizer = max_quantizer - 40;
+ }
+ }
+
+ avif = avifImageCreate(width, height, 8, AVIF_PIXEL_FORMAT_YUV420);
+ avif->matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_BT601;
+ avifRGBImageSetDefaults( &rgb, avif);
+
+ rgb.depth = 8;
+ rgb.pixels = (uint8_t*) gdk_pixbuf_read_pixels(pixbuf);
+ rgb.rowBytes = gdk_pixbuf_get_rowstride(pixbuf);
+
+ if (save_alpha) {
+ rgb.format = AVIF_RGB_FORMAT_RGBA;
+ } else {
+ rgb.format = AVIF_RGB_FORMAT_RGB;
+ }
+
+ res = avifImageRGBToYUV(avif, &rgb);
+ if ( res != AVIF_RESULT_OK ) {
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_FAILED,
+ "Problem in RGB->YUV conversion: %s", avifResultToString(res));
+ avifImageDestroy(avif);
+ return FALSE;
+ }
+
+ maxThreads = g_get_num_processors();
+ encoder = avifEncoderCreate();
+
+ encoder->maxThreads = CLAMP(maxThreads, 1, 64);
+ encoder->minQuantizer = min_quantizer;
+ encoder->maxQuantizer = max_quantizer;
+ encoder->minQuantizerAlpha = 0;
+ encoder->maxQuantizerAlpha = alpha_quantizer;
+ encoder->speed = 8;
+
+ res = avifEncoderWrite(encoder, avif, &raw);
+ avifEncoderDestroy(encoder);
+ avifImageDestroy(avif);
+
+ if ( res == AVIF_RESULT_OK ) {
+ fwrite(raw.data, 1, raw.size, f);
+ avifRWDataFree(&raw);
+ return TRUE;
+ }
+
+ g_set_error(error,
+ GDK_PIXBUF_ERROR,
+ GDK_PIXBUF_ERROR_FAILED,
+ "AVIF encoder problem: %s", avifResultToString(res));
+ return FALSE;
+}
+
+
+G_MODULE_EXPORT void fill_vtable(GdkPixbufModule * module)
+{
+ module->begin_load = begin_load;
+ module->stop_load = stop_load;
+ module->load_increment = load_increment;
+ module->is_save_option_supported = avif_is_save_option_supported;
+ module->save = avif_image_saver;
+}
+
+G_MODULE_EXPORT void fill_info(GdkPixbufFormat * info)
+{
+ static GdkPixbufModulePattern signature[] = {
+ { " ftypavif", "zzz ", 100 }, /* file begins with 'ftypavif' at offset 4 */
+ { NULL, NULL, 0 }
+ };
+ static gchar * mime_types[] = {
+ "image/avif",
+ NULL
+ };
+ static gchar * extensions[] = {
+ "avif",
+ NULL
+ };
+
+ info->name = "avif";
+ info->signature = (GdkPixbufModulePattern *)signature;
+ info->description = "AV1 Image File Format";
+ info->mime_types = (gchar **)mime_types;
+ info->extensions = (gchar **)extensions;
+ info->flags = GDK_PIXBUF_FORMAT_WRITABLE | GDK_PIXBUF_FORMAT_THREADSAFE;
+ info->license = "BSD";
+ info->disabled = FALSE;
+}
+
diff --git a/chromium/third_party/libavif/src/ext/aom.cmd b/chromium/third_party/libavif/src/ext/aom.cmd
index 89fa6a8eb0d..e52ede1d464 100755
--- a/chromium/third_party/libavif/src/ext/aom.cmd
+++ b/chromium/third_party/libavif/src/ext/aom.cmd
@@ -5,10 +5,10 @@
: # cmake and ninja must be in your PATH.
-: # If you're running this on Windows, be sure you've already run this (from your VC2017 install dir):
-: # "C:\Program Files (x86)\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvars64.bat"
+: # If you're running this on Windows, be sure you've already run this (from your VC2019 install dir):
+: # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Auxiliary\Build\vcvars64.bat"
-git clone -b v2.0.0-rc1 --depth 1 https://aomedia.googlesource.com/aom
+git clone -b v2.0.0 --depth 1 https://aomedia.googlesource.com/aom
cd aom
mkdir build.libavif
diff --git a/chromium/third_party/libavif/src/ext/dav1d.cmd b/chromium/third_party/libavif/src/ext/dav1d.cmd
index 6d33b11b2d0..6333a8c35e7 100644
--- a/chromium/third_party/libavif/src/ext/dav1d.cmd
+++ b/chromium/third_party/libavif/src/ext/dav1d.cmd
@@ -5,10 +5,10 @@
: # meson and ninja must be in your PATH.
-: # If you're running this on Windows, be sure you've already run this (from your VC2017 install dir):
-: # "C:\Program Files (x86)\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvars64.bat"
+: # If you're running this on Windows, be sure you've already run this (from your VC2019 install dir):
+: # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Auxiliary\Build\vcvars64.bat"
-git clone -b 0.6.0 --depth 1 https://code.videolan.org/videolan/dav1d.git
+git clone -b 0.7.1 --depth 1 https://code.videolan.org/videolan/dav1d.git
cd dav1d
mkdir build
diff --git a/chromium/third_party/libavif/src/ext/libgav1.cmd b/chromium/third_party/libavif/src/ext/libgav1.cmd
index 913c81fd39c..cd163cd1904 100755
--- a/chromium/third_party/libavif/src/ext/libgav1.cmd
+++ b/chromium/third_party/libavif/src/ext/libgav1.cmd
@@ -5,14 +5,14 @@
: # cmake and ninja must be in your PATH.
-: # If you're running this on Windows, be sure you've already run this (from your VC2017 install dir):
-: # "C:\Program Files (x86)\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvars64.bat"
+: # If you're running this on Windows, be sure you've already run this (from your VC2019 install dir):
+: # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Auxiliary\Build\vcvars64.bat"
git clone --single-branch https://chromium.googlesource.com/codecs/libgav1
cd libgav1
git checkout 45a1d76
-git clone https://github.com/abseil/abseil-cpp.git third_party/abseil-cpp
+git clone -b 20200225.2 --depth 1 https://github.com/abseil/abseil-cpp.git third_party/abseil-cpp
mkdir build
cd build
diff --git a/chromium/third_party/libavif/src/ext/rav1e.cmd b/chromium/third_party/libavif/src/ext/rav1e.cmd
index 101db19f632..9956b0b14d1 100644
--- a/chromium/third_party/libavif/src/ext/rav1e.cmd
+++ b/chromium/third_party/libavif/src/ext/rav1e.cmd
@@ -5,13 +5,13 @@
: # cargo must be in your PATH. (use rustup or brew to install)
-: # If you're running this on Windows targeting Rust's windows-msvc, be sure you've already run this (from your VC2017 install dir):
-: # "C:\Program Files (x86)\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvars64.bat"
+: # If you're running this on Windows targeting Rust's windows-msvc, be sure you've already run this (from your VC2019 install dir):
+: # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Auxiliary\Build\vcvars64.bat"
: #
: # Also, the error that "The target windows-msvc is not supported yet" can safely be ignored provided that rav1e/target/release
: # contains rav1e.h and rav1e.lib.
-git clone -b v0.3.1 --depth 1 https://github.com/xiph/rav1e.git
+git clone -b 0.3 --depth 1 https://github.com/xiph/rav1e.git
cd rav1e
cargo install cbindgen
diff --git a/chromium/third_party/libavif/src/include/avif/avif.h b/chromium/third_party/libavif/src/include/avif/avif.h
index 15be2b598f4..3dba45154eb 100644
--- a/chromium/third_party/libavif/src/include/avif/avif.h
+++ b/chromium/third_party/libavif/src/include/avif/avif.h
@@ -15,8 +15,8 @@ extern "C" {
// Constants
#define AVIF_VERSION_MAJOR 0
-#define AVIF_VERSION_MINOR 7
-#define AVIF_VERSION_PATCH 3
+#define AVIF_VERSION_MINOR 8
+#define AVIF_VERSION_PATCH 1
#define AVIF_VERSION (AVIF_VERSION_MAJOR * 10000) + (AVIF_VERSION_MINOR * 100) + AVIF_VERSION_PATCH
typedef int avifBool;
@@ -48,7 +48,7 @@ enum avifChannelIndex
AVIF_CHAN_G = 1,
AVIF_CHAN_B = 2,
- // yuvPlanes - These are always correct, even if UV is flipped when encoded (YV12)
+ // yuvPlanes
AVIF_CHAN_Y = 0,
AVIF_CHAN_U = 1,
AVIF_CHAN_V = 2
@@ -131,27 +131,36 @@ typedef enum avifPixelFormat
AVIF_PIXEL_FORMAT_YUV444,
AVIF_PIXEL_FORMAT_YUV422,
AVIF_PIXEL_FORMAT_YUV420,
- AVIF_PIXEL_FORMAT_YV12
+ AVIF_PIXEL_FORMAT_YUV400
} avifPixelFormat;
const char * avifPixelFormatToString(avifPixelFormat format);
typedef struct avifPixelFormatInfo
{
+ avifBool monochrome;
int chromaShiftX;
int chromaShiftY;
- int aomIndexU; // maps U plane to AOM-side plane index
- int aomIndexV; // maps V plane to AOM-side plane index
} avifPixelFormatInfo;
void avifGetPixelFormatInfo(avifPixelFormat format, avifPixelFormatInfo * info);
// ---------------------------------------------------------------------------
+// avifChromaSamplePosition
+
+typedef enum avifChromaSamplePosition
+{
+ AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN = 0,
+ AVIF_CHROMA_SAMPLE_POSITION_VERTICAL = 1,
+ AVIF_CHROMA_SAMPLE_POSITION_COLOCATED = 2
+} avifChromaSamplePosition;
+
+// ---------------------------------------------------------------------------
// avifRange
typedef enum avifRange
{
AVIF_RANGE_LIMITED = 0,
- AVIF_RANGE_FULL = 0x80
+ AVIF_RANGE_FULL = 1
} avifRange;
// ---------------------------------------------------------------------------
@@ -179,7 +188,7 @@ typedef enum avifColorPrimaries
// outPrimaries: rX, rY, gX, gY, bX, bY, wX, wY
void avifColorPrimariesGetValues(avifColorPrimaries acp, float outPrimaries[8]);
-avifColorPrimaries avifColorPrimariesFind(float inPrimaries[8], const char ** outName);
+avifColorPrimaries avifColorPrimariesFind(const float inPrimaries[8], const char ** outName);
typedef enum avifTransferCharacteristics
{
@@ -294,14 +303,15 @@ typedef struct avifImage
avifPixelFormat yuvFormat;
avifRange yuvRange;
+ avifChromaSamplePosition yuvChromaSamplePosition;
uint8_t * yuvPlanes[AVIF_PLANE_COUNT_YUV];
uint32_t yuvRowBytes[AVIF_PLANE_COUNT_YUV];
- avifBool decoderOwnsYUVPlanes;
+ avifBool imageOwnsYUVPlanes;
avifRange alphaRange;
uint8_t * alphaPlane;
uint32_t alphaRowBytes;
- avifBool decoderOwnsAlphaPlane;
+ avifBool imageOwnsAlphaPlane;
// ICC Profile
avifRWData icc;
@@ -335,8 +345,8 @@ typedef struct avifImage
} avifImage;
avifImage * avifImageCreate(int width, int height, int depth, avifPixelFormat yuvFormat);
-avifImage * avifImageCreateEmpty(void); // helper for making an image to decode into
-void avifImageCopy(avifImage * dstImage, avifImage * srcImage); // deep copy
+avifImage * avifImageCreateEmpty(void); // helper for making an image to decode into
+void avifImageCopy(avifImage * dstImage, const avifImage * srcImage, uint32_t planes); // deep copy
void avifImageDestroy(avifImage * image);
void avifImageSetProfileICC(avifImage * image, const uint8_t * icc, size_t iccSize);
@@ -375,27 +385,37 @@ typedef enum avifRGBFormat
uint32_t avifRGBFormatChannelCount(avifRGBFormat format);
avifBool avifRGBFormatHasAlpha(avifRGBFormat format);
+typedef enum avifChromaUpsampling
+{
+ AVIF_CHROMA_UPSAMPLING_BILINEAR = 0, // Slower and prettier (default)
+ AVIF_CHROMA_UPSAMPLING_NEAREST = 1 // Faster and uglier
+} avifChromaUpsampling;
+
typedef struct avifRGBImage
{
- uint32_t width; // must match associated avifImage
- uint32_t height; // must match associated avifImage
- uint32_t depth; // legal depths [8, 10, 12, 16]. if depth>8, pixels must be uint16_t internally
- avifRGBFormat format; // all channels are always full range
+ uint32_t width; // must match associated avifImage
+ uint32_t height; // must match associated avifImage
+ uint32_t depth; // legal depths [8, 10, 12, 16]. if depth>8, pixels must be uint16_t internally
+ avifRGBFormat format; // all channels are always full range
+ avifChromaUpsampling chromaUpsampling; // How to upsample non-4:4:4 UV (ignored for 444) when converting to RGB.
+ // Unused when converting to YUV. avifRGBImageSetDefaults() prefers quality over speed.
+ avifBool ignoreAlpha; // Used for XRGB formats, treats formats containing alpha (such as ARGB) as if they were
+ // RGB, treating the alpha bits as if they were all 1.
uint8_t * pixels;
uint32_t rowBytes;
} avifRGBImage;
-void avifRGBImageSetDefaults(avifRGBImage * rgb, avifImage * image);
-uint32_t avifRGBImagePixelSize(avifRGBImage * rgb);
+void avifRGBImageSetDefaults(avifRGBImage * rgb, const avifImage * image);
+uint32_t avifRGBImagePixelSize(const avifRGBImage * rgb);
// Convenience functions. If you supply your own pixels/rowBytes, you do not need to use these.
void avifRGBImageAllocatePixels(avifRGBImage * rgb);
void avifRGBImageFreePixels(avifRGBImage * rgb);
// The main conversion functions
-avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb);
-avifResult avifImageYUVToRGB(avifImage * image, avifRGBImage * rgb);
+avifResult avifImageRGBToYUV(avifImage * image, const avifRGBImage * rgb);
+avifResult avifImageYUVToRGB(const avifImage * image, avifRGBImage * rgb);
// ---------------------------------------------------------------------------
// YUV Utils
@@ -435,7 +455,7 @@ typedef struct avifReformatState
avifReformatMode mode;
} avifReformatState;
-avifBool avifPrepareReformatState(avifImage * image, avifRGBImage * rgb, avifReformatState * state);
+avifBool avifPrepareReformatState(const avifImage * image, const avifRGBImage * rgb, avifReformatState * state);
// ---------------------------------------------------------------------------
// Codec selection
@@ -459,6 +479,30 @@ typedef enum avifCodecFlags
const char * avifCodecName(avifCodecChoice choice, uint32_t requiredFlags);
avifCodecChoice avifCodecChoiceFromName(const char * name);
+typedef struct avifCodecConfigurationBox
+{
+ // [skipped; is constant] unsigned int (1)marker = 1;
+ // [skipped; is constant] unsigned int (7)version = 1;
+
+ uint8_t seqProfile; // unsigned int (3) seq_profile;
+ uint8_t seqLevelIdx0; // unsigned int (5) seq_level_idx_0;
+ uint8_t seqTier0; // unsigned int (1) seq_tier_0;
+ uint8_t highBitdepth; // unsigned int (1) high_bitdepth;
+ uint8_t twelveBit; // unsigned int (1) twelve_bit;
+ uint8_t monochrome; // unsigned int (1) monochrome;
+ uint8_t chromaSubsamplingX; // unsigned int (1) chroma_subsampling_x;
+ uint8_t chromaSubsamplingY; // unsigned int (1) chroma_subsampling_y;
+ uint8_t chromaSamplePosition; // unsigned int (2) chroma_sample_position;
+
+ // unsigned int (3)reserved = 0;
+ // unsigned int (1)initial_presentation_delay_present;
+ // if (initial_presentation_delay_present) {
+ // unsigned int (4)initial_presentation_delay_minus_one;
+ // } else {
+ // unsigned int (4)reserved = 0;
+ // }
+} avifCodecConfigurationBox;
+
// ---------------------------------------------------------------------------
// avifDecoder
@@ -507,11 +551,19 @@ typedef struct avifDecoder
// Set this via avifDecoderSetSource().
avifDecoderSource requestedSource;
- // The current decoded image, owned by the decoder. Is invalid if the decoder hasn't run or has run
- // out of images. The YUV and A contents of this image are likely owned by the decoder, so be
- // sure to copy any data inside of this image before advancing to the next image or reusing the
- // decoder. It is legal to call avifImageYUVToRGB() on this in between calls to avifDecoderNextImage(),
- // but use avifImageCopy() if you want to make a permanent copy of this image's contents.
+ // All decoded image data; owned by the decoder. All information in this image is incrementally
+ // added and updated as avifDecoder*() functions are called. After a successful call to
+ // avifDecoderParse(), all values in decoder->image (other than the planes/rowBytes themselves)
+ // will be pre-populated with all information found in the outer AVIF container, prior to any
+ // AV1 decoding. If the contents of the inner AV1 payload disagree with the outer container,
+ // these values may change after calls to avifDecoderRead(),avifDecoderNextImage(), or
+ // avifDecoderNthImage().
+ //
+ // The YUV and A contents of this image are likely owned by the decoder, so be sure to copy any
+ // data inside of this image before advancing to the next image or reusing the decoder. It is
+ // legal to call avifImageYUVToRGB() on this in between calls to avifDecoderNextImage(), but use
+ // avifImageCopy() if you want to make a complete, permanent copy of this image's YUV content or
+ // metadata.
avifImage * image;
// Counts and timing for the current image in an image sequence. Uninteresting for single image files.
@@ -522,25 +574,13 @@ typedef struct avifDecoder
double duration; // in seconds (durationInTimescales / timescale)
uint64_t durationInTimescales; // duration in "timescales"
- // The width and height as reported by the AVIF container, if any. There is no guarantee
- // these match the decoded images; they are merely reporting what is independently offered
- // from the container's boxes.
- // * If decoding an "item" and the item is associated with an ImageSpatialExtentsBox,
- // it will use the box's width/height
- // * Else if decoding tracks, these will be the integer portions of the TrackHeaderBox width/height
- // * Else both will be set to 0.
- uint32_t containerWidth;
- uint32_t containerHeight;
-
- // The bit depth as reported by the AVIF container, if any. There is no guarantee
- // this matches the decoded images; it is merely reporting what is independently offered
- // from the container's boxes.
- // * If decoding an "item" and the item is associated with an av1C property,
- // it will use the box's depth flags.
- // * Else if decoding tracks and there is a SampleDescriptionBox of type av01 containing an av1C box,
- // it will use the box's depth flags.
- // * Else it will be set to 0.
- uint32_t containerDepth;
+ // This is true when avifDecoderParse() detects an alpha plane. Use this to find out if alpha is
+ // present after a successful call to avifDecoderParse(), but prior to any call to
+ // avifDecoderNextImage() or avifDecoderNthImage(), as decoder->image->alphaPlane won't exist yet.
+ avifBool alphaPresent;
+
+ // Set this to true to disable support of grid images. If a grid image is encountered, AVIF_RESULT_BMFF_PARSE_FAILED will be returned.
+ avifBool disableGridImages;
// stats from the most recent read, possibly 0s if reading an image sequence
avifIOStats ioStats;
@@ -553,7 +593,7 @@ avifDecoder * avifDecoderCreate(void);
void avifDecoderDestroy(avifDecoder * decoder);
// Simple interface to decode a single image, independent of the decoder afterwards (decoder may be deestroyed).
-avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, avifROData * input);
+avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, const avifROData * input);
// Multi-function alternative to avifDecoderRead() for image sequences and gaining direct access
// to the decoder's YUV buffers (for performance's sake). Data passed into avifDecoderParse() is NOT
@@ -574,7 +614,7 @@ avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, avifROData
// items in a file containing both, but switch between sources without having to
// Parse again. Normally AVIF_DECODER_SOURCE_AUTO is enough for the common path.
avifResult avifDecoderSetSource(avifDecoder * decoder, avifDecoderSource source);
-avifResult avifDecoderParse(avifDecoder * decoder, avifROData * input);
+avifResult avifDecoderParse(avifDecoder * decoder, const avifROData * input);
avifResult avifDecoderNextImage(avifDecoder * decoder);
avifResult avifDecoderNthImage(avifDecoder * decoder, uint32_t frameIndex);
avifResult avifDecoderReset(avifDecoder * decoder);
@@ -582,11 +622,12 @@ avifResult avifDecoderReset(avifDecoder * decoder);
// Keyframe information
// frameIndex - 0-based, matching avifDecoder->imageIndex, bound by avifDecoder->imageCount
// "nearest" keyframe means the keyframe prior to this frame index (returns frameIndex if it is a keyframe)
-avifBool avifDecoderIsKeyframe(avifDecoder * decoder, uint32_t frameIndex);
-uint32_t avifDecoderNearestKeyframe(avifDecoder * decoder, uint32_t frameIndex);
+avifBool avifDecoderIsKeyframe(const avifDecoder * decoder, uint32_t frameIndex);
+uint32_t avifDecoderNearestKeyframe(const avifDecoder * decoder, uint32_t frameIndex);
// Timing helper - This does not change the current image or invoke the codec (safe to call repeatedly)
-avifResult avifDecoderNthImageTiming(avifDecoder * decoder, uint32_t frameIndex, avifImageTiming * outTiming);
+// This function may be used after a successful call to avifDecoderParse().
+avifResult avifDecoderNthImageTiming(const avifDecoder * decoder, uint32_t frameIndex, avifImageTiming * outTiming);
// ---------------------------------------------------------------------------
// avifEncoder
@@ -617,6 +658,8 @@ typedef struct avifEncoder
int tileRowsLog2;
int tileColsLog2;
int speed;
+ int keyframeInterval; // How many frames between automatic forced keyframes; 0 to disable (default).
+ uint64_t timescale; // timescale of the media (Hz)
// stats from the most recent write
avifIOStats ioStats;
@@ -626,15 +669,40 @@ typedef struct avifEncoder
} avifEncoder;
avifEncoder * avifEncoderCreate(void);
-avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData * output);
+avifResult avifEncoderWrite(avifEncoder * encoder, const avifImage * image, avifRWData * output);
void avifEncoderDestroy(avifEncoder * encoder);
+enum avifAddImageFlags
+{
+ AVIF_ADD_IMAGE_FLAG_NONE = 0,
+
+ // Force this frame to be a keyframe (sync frame).
+ AVIF_ADD_IMAGE_FLAG_FORCE_KEYFRAME = (1 << 0),
+
+ // Use this flag when encoding a single image. Signals "still_picture" to AV1 encoders, which
+ // tweaks various compression rules. This is enabled automatically when using the
+ // avifEncoderWrite() single-image encode path.
+ AVIF_ADD_IMAGE_FLAG_SINGLE = (1 << 1)
+};
+
+// Multi-function alternative to avifEncoderWrite() for image sequences.
+//
+// Usage / function call order is:
+// * avifEncoderCreate()
+// * Set encoder->timescale (Hz) correctly
+// * avifEncoderAddImage() ... [repeatedly; at least once]
+// * avifEncoderFinish()
+// * avifEncoderDestroy()
+//
+avifResult avifEncoderAddImage(avifEncoder * encoder, const avifImage * image, uint64_t durationInTimescales, uint32_t addImageFlags);
+avifResult avifEncoderFinish(avifEncoder * encoder, avifRWData * output);
+
// Helpers
-avifBool avifImageUsesU16(avifImage * image);
+avifBool avifImageUsesU16(const avifImage * image);
// Returns AVIF_TRUE if input begins with a valid FileTypeBox (ftyp) that supports
// either the brand 'avif' or 'avis' (or both), without performing any allocations.
-avifBool avifPeekCompatibleFileType(avifROData * input);
+avifBool avifPeekCompatibleFileType(const avifROData * input);
#ifdef __cplusplus
} // extern "C"
diff --git a/chromium/third_party/libavif/src/include/avif/internal.h b/chromium/third_party/libavif/src/include/avif/internal.h
index 33d19f00a23..992bc553ee4 100644
--- a/chromium/third_party/libavif/src/include/avif/internal.h
+++ b/chromium/third_party/libavif/src/include/avif/internal.h
@@ -41,7 +41,7 @@ uint32_t avifNTOHL(uint32_t l);
uint64_t avifHTON64(uint64_t l);
uint64_t avifNTOH64(uint64_t l);
-void avifCalcYUVCoefficients(avifImage * image, float * outR, float * outG, float * outB);
+void avifCalcYUVCoefficients(const avifImage * image, float * outR, float * outG, float * outB);
#define AVIF_ARRAY_DECLARE(TYPENAME, ITEMSTYPE, ITEMSNAME) \
typedef struct TYPENAME \
@@ -87,16 +87,16 @@ avifBool avifReformatAlpha(const avifAlphaParams * const params);
// ---------------------------------------------------------------------------
// avifCodecDecodeInput
-typedef struct avifSample
+typedef struct avifDecodeSample
{
avifROData data;
avifBool sync; // is sync sample (keyframe)
-} avifSample;
-AVIF_ARRAY_DECLARE(avifSampleArray, avifSample, sample);
+} avifDecodeSample;
+AVIF_ARRAY_DECLARE(avifDecodeSampleArray, avifDecodeSample, sample);
typedef struct avifCodecDecodeInput
{
- avifSampleArray samples;
+ avifDecodeSampleArray samples;
avifBool alpha; // if true, this is decoding an alpha plane
} avifCodecDecodeInput;
@@ -104,39 +104,41 @@ avifCodecDecodeInput * avifCodecDecodeInputCreate(void);
void avifCodecDecodeInputDestroy(avifCodecDecodeInput * decodeInput);
// ---------------------------------------------------------------------------
-// avifCodec (abstraction layer to use different AV1 implementations)
+// avifCodecEncodeOutput
+
+typedef struct avifEncodeSample
+{
+ avifRWData data;
+ avifBool sync; // is sync sample (keyframe)
+} avifEncodeSample;
+AVIF_ARRAY_DECLARE(avifEncodeSampleArray, avifEncodeSample, sample);
-typedef struct avifCodecConfigurationBox
+typedef struct avifCodecEncodeOutput
{
- // [skipped; is constant] unsigned int (1)marker = 1;
- // [skipped; is constant] unsigned int (7)version = 1;
-
- uint8_t seqProfile; // unsigned int (3) seq_profile;
- uint8_t seqLevelIdx0; // unsigned int (5) seq_level_idx_0;
- uint8_t seqTier0; // unsigned int (1) seq_tier_0;
- uint8_t highBitdepth; // unsigned int (1) high_bitdepth;
- uint8_t twelveBit; // unsigned int (1) twelve_bit;
- uint8_t monochrome; // unsigned int (1) monochrome;
- uint8_t chromaSubsamplingX; // unsigned int (1) chroma_subsampling_x;
- uint8_t chromaSubsamplingY; // unsigned int (1) chroma_subsampling_y;
- uint8_t chromaSamplePosition; // unsigned int (2) chroma_sample_position;
-
- // unsigned int (3)reserved = 0;
- // unsigned int (1)initial_presentation_delay_present;
- // if (initial_presentation_delay_present) {
- // unsigned int (4)initial_presentation_delay_minus_one;
- // } else {
- // unsigned int (4)reserved = 0;
- // }
-} avifCodecConfigurationBox;
+ avifEncodeSampleArray samples;
+} avifCodecEncodeOutput;
+
+avifCodecEncodeOutput * avifCodecEncodeOutputCreate(void);
+void avifCodecEncodeOutputAddSample(avifCodecEncodeOutput * encodeOutput, const uint8_t * data, size_t len, avifBool sync);
+void avifCodecEncodeOutputDestroy(avifCodecEncodeOutput * encodeOutput);
+
+// ---------------------------------------------------------------------------
+// avifCodec (abstraction layer to use different AV1 implementations)
struct avifCodec;
struct avifCodecInternal;
typedef avifBool (*avifCodecOpenFunc)(struct avifCodec * codec, uint32_t firstSampleIndex);
typedef avifBool (*avifCodecGetNextImageFunc)(struct avifCodec * codec, avifImage * image);
-// avifCodecEncodeImageFunc: if either OBU* is null, skip its encode. alpha should always be lossless
-typedef avifBool (*avifCodecEncodeImageFunc)(struct avifCodec * codec, avifImage * image, avifEncoder * encoder, avifRWData * obu, avifBool alpha);
+// EncodeImage and EncodeFinish are not required to always emit a sample, but when all images are
+// encoded and EncodeFinish is called, the number of samples emitted must match the number of submitted frames.
+typedef avifBool (*avifCodecEncodeImageFunc)(struct avifCodec * codec,
+ avifEncoder * encoder,
+ const avifImage * image,
+ avifBool alpha,
+ uint32_t addImageFlags,
+ avifCodecEncodeOutput * output);
+typedef avifBool (*avifCodecEncodeFinishFunc)(struct avifCodec * codec, avifCodecEncodeOutput * output);
typedef void (*avifCodecDestroyInternalFunc)(struct avifCodec * codec);
typedef struct avifCodec
@@ -148,6 +150,7 @@ typedef struct avifCodec
avifCodecOpenFunc open;
avifCodecGetNextImageFunc getNextImage;
avifCodecEncodeImageFunc encodeImage;
+ avifCodecEncodeFinishFunc encodeFinish;
avifCodecDestroyInternalFunc destroyInternal;
} avifCodec;
@@ -182,11 +185,11 @@ typedef struct avifROStream
const uint8_t * avifROStreamCurrent(avifROStream * stream);
void avifROStreamStart(avifROStream * stream, avifROData * raw);
-size_t avifROStreamOffset(avifROStream * stream);
+size_t avifROStreamOffset(const avifROStream * stream);
void avifROStreamSetOffset(avifROStream * stream, size_t offset);
-avifBool avifROStreamHasBytesLeft(avifROStream * stream, size_t byteCount);
-size_t avifROStreamRemainingBytes(avifROStream * stream);
+avifBool avifROStreamHasBytesLeft(const avifROStream * stream, size_t byteCount);
+size_t avifROStreamRemainingBytes(const avifROStream * stream);
avifBool avifROStreamSkip(avifROStream * stream, size_t byteCount);
avifBool avifROStreamRead(avifROStream * stream, uint8_t * data, size_t size);
avifBool avifROStreamReadU16(avifROStream * stream, uint16_t * v);
@@ -195,8 +198,8 @@ avifBool avifROStreamReadUX8(avifROStream * stream, uint64_t * v, uint64_t facto
avifBool avifROStreamReadU64(avifROStream * stream, uint64_t * v);
avifBool avifROStreamReadString(avifROStream * stream, char * output, size_t outputSize);
avifBool avifROStreamReadBoxHeader(avifROStream * stream, avifBoxHeader * header);
-avifBool avifROStreamReadVersionAndFlags(avifROStream * stream, uint8_t * version, uint8_t * flags); // flags is an optional uint8_t[3]
-avifBool avifROStreamReadAndEnforceVersion(avifROStream * stream, uint8_t enforcedVersion); // currently discards flags
+avifBool avifROStreamReadVersionAndFlags(avifROStream * stream, uint8_t * version, uint32_t * flags); // version and flags ptrs are both optional
+avifBool avifROStreamReadAndEnforceVersion(avifROStream * stream, uint8_t enforcedVersion); // currently discards flags
typedef struct avifRWStream
{
@@ -206,19 +209,42 @@ typedef struct avifRWStream
uint8_t * avifRWStreamCurrent(avifRWStream * stream);
void avifRWStreamStart(avifRWStream * stream, avifRWData * raw);
-size_t avifRWStreamOffset(avifRWStream * stream);
+size_t avifRWStreamOffset(const avifRWStream * stream);
void avifRWStreamSetOffset(avifRWStream * stream, size_t offset);
void avifRWStreamFinishWrite(avifRWStream * stream);
-void avifRWStreamWrite(avifRWStream * stream, const uint8_t * data, size_t size);
+void avifRWStreamWrite(avifRWStream * stream, const void * data, size_t size);
void avifRWStreamWriteChars(avifRWStream * stream, const char * chars, size_t size);
-avifBoxMarker avifRWStreamWriteBox(avifRWStream * stream, const char * type, int version /* -1 for "not a FullBox" */, size_t contentSize);
+avifBoxMarker avifRWStreamWriteBox(avifRWStream * stream, const char * type, size_t contentSize);
+avifBoxMarker avifRWStreamWriteFullBox(avifRWStream * stream, const char * type, size_t contentSize, int version, uint32_t flags);
void avifRWStreamFinishBox(avifRWStream * stream, avifBoxMarker marker);
void avifRWStreamWriteU8(avifRWStream * stream, uint8_t v);
void avifRWStreamWriteU16(avifRWStream * stream, uint16_t v);
void avifRWStreamWriteU32(avifRWStream * stream, uint32_t v);
+void avifRWStreamWriteU64(avifRWStream * stream, uint64_t v);
void avifRWStreamWriteZeros(avifRWStream * stream, size_t byteCount);
+// This is to make it clear that the box size is currently unknown, and will be determined later (with a call to avifRWStreamFinishBox)
+#define AVIF_BOX_SIZE_TBD 0
+
+typedef struct avifSequenceHeader
+{
+ uint32_t maxWidth;
+ uint32_t maxHeight;
+ uint32_t bitDepth;
+ avifPixelFormat yuvFormat;
+ avifChromaSamplePosition chromaSamplePosition;
+ avifColorPrimaries colorPrimaries;
+ avifTransferCharacteristics transferCharacteristics;
+ avifMatrixCoefficients matrixCoefficients;
+ avifRange range;
+} avifSequenceHeader;
+avifBool avifSequenceHeaderParse(avifSequenceHeader * header, const avifROData * sample);
+
+// A maximum image size to avoid out-of-memory errors or integer overflow in
+// (32-bit) int or unsigned int arithmetic operations.
+#define AVIF_MAX_IMAGE_SIZE (16384 * 16384)
+
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/chromium/third_party/libavif/src/src/avif.c b/chromium/third_party/libavif/src/src/avif.c
index e88d4071de1..98585be194d 100644
--- a/chromium/third_party/libavif/src/src/avif.c
+++ b/chromium/third_party/libavif/src/src/avif.c
@@ -23,8 +23,8 @@ const char * avifPixelFormatToString(avifPixelFormat format)
return "YUV420";
case AVIF_PIXEL_FORMAT_YUV422:
return "YUV422";
- case AVIF_PIXEL_FORMAT_YV12:
- return "YV12";
+ case AVIF_PIXEL_FORMAT_YUV400:
+ return "YUV400";
case AVIF_PIXEL_FORMAT_NONE:
default:
break;
@@ -35,8 +35,6 @@ const char * avifPixelFormatToString(avifPixelFormat format)
void avifGetPixelFormatInfo(avifPixelFormat format, avifPixelFormatInfo * info)
{
memset(info, 0, sizeof(avifPixelFormatInfo));
- info->aomIndexU = 1;
- info->aomIndexV = 2;
switch (format) {
case AVIF_PIXEL_FORMAT_YUV444:
@@ -54,11 +52,10 @@ void avifGetPixelFormatInfo(avifPixelFormat format, avifPixelFormatInfo * info)
info->chromaShiftY = 1;
break;
- case AVIF_PIXEL_FORMAT_YV12:
+ case AVIF_PIXEL_FORMAT_YUV400:
info->chromaShiftX = 1;
info->chromaShiftY = 1;
- info->aomIndexU = 2;
- info->aomIndexV = 1;
+ info->monochrome = AVIF_TRUE;
break;
case AVIF_PIXEL_FORMAT_NONE:
@@ -124,7 +121,7 @@ avifImage * avifImageCreateEmpty(void)
return avifImageCreate(0, 0, 0, AVIF_PIXEL_FORMAT_NONE);
}
-void avifImageCopy(avifImage * dstImage, avifImage * srcImage)
+void avifImageCopy(avifImage * dstImage, const avifImage * srcImage, uint32_t planes)
{
avifImageFreePlanes(dstImage, AVIF_PLANES_ALL);
@@ -133,6 +130,7 @@ void avifImageCopy(avifImage * dstImage, avifImage * srcImage)
dstImage->depth = srcImage->depth;
dstImage->yuvFormat = srcImage->yuvFormat;
dstImage->yuvRange = srcImage->yuvRange;
+ dstImage->yuvChromaSamplePosition = srcImage->yuvChromaSamplePosition;
dstImage->alphaRange = srcImage->alphaRange;
dstImage->colorPrimaries = srcImage->colorPrimaries;
@@ -150,41 +148,33 @@ void avifImageCopy(avifImage * dstImage, avifImage * srcImage)
avifImageSetMetadataExif(dstImage, srcImage->exif.data, srcImage->exif.size);
avifImageSetMetadataXMP(dstImage, srcImage->xmp.data, srcImage->xmp.size);
- if (srcImage->yuvPlanes[AVIF_CHAN_Y]) {
+ if ((planes & AVIF_PLANES_YUV) && srcImage->yuvPlanes[AVIF_CHAN_Y]) {
avifImageAllocatePlanes(dstImage, AVIF_PLANES_YUV);
avifPixelFormatInfo formatInfo;
avifGetPixelFormatInfo(srcImage->yuvFormat, &formatInfo);
- int uvHeight = (dstImage->height + formatInfo.chromaShiftY) >> formatInfo.chromaShiftY;
+ uint32_t uvHeight = (dstImage->height + formatInfo.chromaShiftY) >> formatInfo.chromaShiftY;
for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
- int aomPlaneIndex = yuvPlane;
- int planeHeight = dstImage->height;
- if (yuvPlane == AVIF_CHAN_U) {
- aomPlaneIndex = formatInfo.aomIndexU;
- planeHeight = uvHeight;
- } else if (yuvPlane == AVIF_CHAN_V) {
- aomPlaneIndex = formatInfo.aomIndexV;
- planeHeight = uvHeight;
- }
+ uint32_t planeHeight = (yuvPlane == AVIF_CHAN_Y) ? dstImage->height : uvHeight;
- if (!srcImage->yuvRowBytes[aomPlaneIndex]) {
+ if (!srcImage->yuvRowBytes[yuvPlane]) {
// plane is absent. If we're copying from a source without
// them, mimic the source image's state by removing our copy.
- avifFree(dstImage->yuvPlanes[aomPlaneIndex]);
- dstImage->yuvPlanes[aomPlaneIndex] = NULL;
- dstImage->yuvRowBytes[aomPlaneIndex] = 0;
+ avifFree(dstImage->yuvPlanes[yuvPlane]);
+ dstImage->yuvPlanes[yuvPlane] = NULL;
+ dstImage->yuvRowBytes[yuvPlane] = 0;
continue;
}
- for (int j = 0; j < planeHeight; ++j) {
- uint8_t * srcRow = &srcImage->yuvPlanes[aomPlaneIndex][j * srcImage->yuvRowBytes[aomPlaneIndex]];
+ for (uint32_t j = 0; j < planeHeight; ++j) {
+ uint8_t * srcRow = &srcImage->yuvPlanes[yuvPlane][j * srcImage->yuvRowBytes[yuvPlane]];
uint8_t * dstRow = &dstImage->yuvPlanes[yuvPlane][j * dstImage->yuvRowBytes[yuvPlane]];
memcpy(dstRow, srcRow, dstImage->yuvRowBytes[yuvPlane]);
}
}
}
- if (srcImage->alphaPlane) {
+ if ((planes & AVIF_PLANES_A) && srcImage->alphaPlane) {
avifImageAllocatePlanes(dstImage, AVIF_PLANES_A);
for (uint32_t j = 0; j < dstImage->height; ++j) {
uint8_t * srcAlphaRow = &srcImage->alphaPlane[j * srcImage->alphaRowBytes];
@@ -237,27 +227,32 @@ void avifImageAllocatePlanes(avifImage * image, uint32_t planes)
image->yuvRowBytes[AVIF_CHAN_Y] = fullRowBytes;
image->yuvPlanes[AVIF_CHAN_Y] = avifAlloc(fullSize);
}
- if (!image->yuvPlanes[AVIF_CHAN_U]) {
- image->yuvRowBytes[AVIF_CHAN_U] = uvRowBytes;
- image->yuvPlanes[AVIF_CHAN_U] = avifAlloc(uvSize);
- }
- if (!image->yuvPlanes[AVIF_CHAN_V]) {
- image->yuvRowBytes[AVIF_CHAN_V] = uvRowBytes;
- image->yuvPlanes[AVIF_CHAN_V] = avifAlloc(uvSize);
+
+ if (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV400) {
+ if (!image->yuvPlanes[AVIF_CHAN_U]) {
+ image->yuvRowBytes[AVIF_CHAN_U] = uvRowBytes;
+ image->yuvPlanes[AVIF_CHAN_U] = avifAlloc(uvSize);
+ }
+ if (!image->yuvPlanes[AVIF_CHAN_V]) {
+ image->yuvRowBytes[AVIF_CHAN_V] = uvRowBytes;
+ image->yuvPlanes[AVIF_CHAN_V] = avifAlloc(uvSize);
+ }
}
+ image->imageOwnsYUVPlanes = AVIF_TRUE;
}
if (planes & AVIF_PLANES_A) {
if (!image->alphaPlane) {
image->alphaRowBytes = fullRowBytes;
image->alphaPlane = avifAlloc(fullRowBytes * image->height);
}
+ image->imageOwnsAlphaPlane = AVIF_TRUE;
}
}
void avifImageFreePlanes(avifImage * image, uint32_t planes)
{
if ((planes & AVIF_PLANES_YUV) && (image->yuvFormat != AVIF_PIXEL_FORMAT_NONE)) {
- if (!image->decoderOwnsYUVPlanes) {
+ if (image->imageOwnsYUVPlanes) {
avifFree(image->yuvPlanes[AVIF_CHAN_Y]);
avifFree(image->yuvPlanes[AVIF_CHAN_U]);
avifFree(image->yuvPlanes[AVIF_CHAN_V]);
@@ -268,15 +263,15 @@ void avifImageFreePlanes(avifImage * image, uint32_t planes)
image->yuvRowBytes[AVIF_CHAN_U] = 0;
image->yuvPlanes[AVIF_CHAN_V] = NULL;
image->yuvRowBytes[AVIF_CHAN_V] = 0;
- image->decoderOwnsYUVPlanes = AVIF_FALSE;
+ image->imageOwnsYUVPlanes = AVIF_FALSE;
}
if (planes & AVIF_PLANES_A) {
- if (!image->decoderOwnsAlphaPlane) {
+ if (image->imageOwnsAlphaPlane) {
avifFree(image->alphaPlane);
}
image->alphaPlane = NULL;
image->alphaRowBytes = 0;
- image->decoderOwnsAlphaPlane = AVIF_FALSE;
+ image->imageOwnsAlphaPlane = AVIF_FALSE;
}
}
@@ -301,8 +296,9 @@ void avifImageStealPlanes(avifImage * dstImage, avifImage * srcImage, uint32_t p
dstImage->yuvFormat = srcImage->yuvFormat;
dstImage->yuvRange = srcImage->yuvRange;
- dstImage->decoderOwnsYUVPlanes = srcImage->decoderOwnsYUVPlanes;
- srcImage->decoderOwnsYUVPlanes = AVIF_FALSE;
+ dstImage->yuvChromaSamplePosition = srcImage->yuvChromaSamplePosition;
+ dstImage->imageOwnsYUVPlanes = srcImage->imageOwnsYUVPlanes;
+ srcImage->imageOwnsYUVPlanes = AVIF_FALSE;
}
if (planes & AVIF_PLANES_A) {
dstImage->alphaPlane = srcImage->alphaPlane;
@@ -312,12 +308,12 @@ void avifImageStealPlanes(avifImage * dstImage, avifImage * srcImage, uint32_t p
srcImage->alphaPlane = NULL;
srcImage->alphaRowBytes = 0;
- dstImage->decoderOwnsAlphaPlane = srcImage->decoderOwnsAlphaPlane;
- srcImage->decoderOwnsAlphaPlane = AVIF_FALSE;
+ dstImage->imageOwnsAlphaPlane = srcImage->imageOwnsAlphaPlane;
+ srcImage->imageOwnsAlphaPlane = AVIF_FALSE;
}
}
-avifBool avifImageUsesU16(avifImage * image)
+avifBool avifImageUsesU16(const avifImage * image)
{
return (image->depth > 8);
}
@@ -345,17 +341,19 @@ uint32_t avifRGBFormatChannelCount(avifRGBFormat format)
return avifRGBFormatHasAlpha(format) ? 4 : 3;
}
-uint32_t avifRGBImagePixelSize(avifRGBImage * rgb)
+uint32_t avifRGBImagePixelSize(const avifRGBImage * rgb)
{
return avifRGBFormatChannelCount(rgb->format) * ((rgb->depth > 8) ? 2 : 1);
}
-void avifRGBImageSetDefaults(avifRGBImage * rgb, avifImage * image)
+void avifRGBImageSetDefaults(avifRGBImage * rgb, const avifImage * image)
{
rgb->width = image->width;
rgb->height = image->height;
rgb->depth = image->depth;
rgb->format = AVIF_RGB_FORMAT_RGBA;
+ rgb->chromaUpsampling = AVIF_CHROMA_UPSAMPLING_BILINEAR;
+ rgb->ignoreAlpha = AVIF_FALSE;
rgb->pixels = NULL;
rgb->rowBytes = 0;
}
diff --git a/chromium/third_party/libavif/src/src/codec_aom.c b/chromium/third_party/libavif/src/src/codec_aom.c
index 56fa4751ee7..10bc8e87d92 100644
--- a/chromium/third_party/libavif/src/src/codec_aom.c
+++ b/chromium/third_party/libavif/src/src/codec_aom.c
@@ -34,6 +34,12 @@ struct avifCodecInternal
aom_codec_iter_t iter;
uint32_t inputSampleIndex;
aom_image_t * image;
+
+ avifBool encoderInitialized;
+ aom_codec_ctx_t encoder;
+ avifPixelFormatInfo formatInfo;
+ aom_img_fmt_t aomFormat;
+ avifBool monochromeEnabled;
};
static void aomCodecDestroyInternal(avifCodec * codec)
@@ -41,6 +47,9 @@ static void aomCodecDestroyInternal(avifCodec * codec)
if (codec->internal->decoderInitialized) {
aom_codec_destroy(&codec->internal->decoder);
}
+ if (codec->internal->encoderInitialized) {
+ aom_codec_destroy(&codec->internal->encoder);
+ }
avifFree(codec->internal);
}
@@ -71,7 +80,7 @@ static avifBool aomCodecGetNextImage(avifCodec * codec, avifImage * image)
break;
} else if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
// Feed another sample
- avifSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
+ avifDecodeSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
++codec->internal->inputSampleIndex;
codec->internal->iter = NULL;
if (aom_codec_decode(&codec->internal->decoder, sample->data.data, sample->data.size, NULL)) {
@@ -112,14 +121,15 @@ static avifBool aomCodecGetNextImage(avifCodec * codec, avifImage * image)
case AOM_IMG_FMT_I44416:
yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
break;
+ case AOM_IMG_FMT_NONE:
case AOM_IMG_FMT_YV12:
case AOM_IMG_FMT_AOMYV12:
case AOM_IMG_FMT_YV1216:
- yuvFormat = AVIF_PIXEL_FORMAT_YV12;
- break;
- case AOM_IMG_FMT_NONE:
default:
- break;
+ return AVIF_FALSE;
+ }
+ if (codec->internal->image->monochrome) {
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
}
if (image->width && image->height) {
@@ -145,17 +155,12 @@ static avifBool aomCodecGetNextImage(avifCodec * codec, avifImage * image)
// Steal the pointers from the decoder's image directly
avifImageFreePlanes(image, AVIF_PLANES_YUV);
- for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
- int aomPlaneIndex = yuvPlane;
- if (yuvPlane == AVIF_CHAN_U) {
- aomPlaneIndex = formatInfo.aomIndexU;
- } else if (yuvPlane == AVIF_CHAN_V) {
- aomPlaneIndex = formatInfo.aomIndexV;
- }
- image->yuvPlanes[yuvPlane] = codec->internal->image->planes[aomPlaneIndex];
- image->yuvRowBytes[yuvPlane] = codec->internal->image->stride[aomPlaneIndex];
+ int yuvPlaneCount = (yuvFormat == AVIF_PIXEL_FORMAT_YUV400) ? 1 : 3;
+ for (int yuvPlane = 0; yuvPlane < yuvPlaneCount; ++yuvPlane) {
+ image->yuvPlanes[yuvPlane] = codec->internal->image->planes[yuvPlane];
+ image->yuvRowBytes[yuvPlane] = codec->internal->image->stride[yuvPlane];
}
- image->decoderOwnsYUVPlanes = AVIF_TRUE;
+ image->imageOwnsYUVPlanes = AVIF_FALSE;
} else {
// Alpha plane - ensure image is correct size, fill color
@@ -174,21 +179,18 @@ static avifBool aomCodecGetNextImage(avifCodec * codec, avifImage * image)
image->alphaPlane = codec->internal->image->planes[0];
image->alphaRowBytes = codec->internal->image->stride[0];
image->alphaRange = (codec->internal->image->range == AOM_CR_STUDIO_RANGE) ? AVIF_RANGE_LIMITED : AVIF_RANGE_FULL;
- image->decoderOwnsAlphaPlane = AVIF_TRUE;
+ image->imageOwnsAlphaPlane = AVIF_FALSE;
}
return AVIF_TRUE;
}
-static aom_img_fmt_t avifImageCalcAOMFmt(avifImage * image, avifBool alpha, int * yShift)
+static aom_img_fmt_t avifImageCalcAOMFmt(const avifImage * image, avifBool alpha)
{
- *yShift = 0;
-
aom_img_fmt_t fmt;
if (alpha) {
// We're going monochrome, who cares about chroma quality
fmt = AOM_IMG_FMT_I420;
- *yShift = 1;
} else {
switch (image->yuvFormat) {
case AVIF_PIXEL_FORMAT_YUV444:
@@ -198,12 +200,8 @@ static aom_img_fmt_t avifImageCalcAOMFmt(avifImage * image, avifBool alpha, int
fmt = AOM_IMG_FMT_I422;
break;
case AVIF_PIXEL_FORMAT_YUV420:
+ case AVIF_PIXEL_FORMAT_YUV400:
fmt = AOM_IMG_FMT_I420;
- *yShift = 1;
- break;
- case AVIF_PIXEL_FORMAT_YV12:
- fmt = AOM_IMG_FMT_YV12;
- *yShift = 1;
break;
case AVIF_PIXEL_FORMAT_NONE:
default:
@@ -218,145 +216,169 @@ static aom_img_fmt_t avifImageCalcAOMFmt(avifImage * image, avifBool alpha, int
return fmt;
}
-static avifBool aomCodecEncodeImage(avifCodec * codec, avifImage * image, avifEncoder * encoder, avifRWData * obu, avifBool alpha)
+static avifBool aomCodecEncodeImage(avifCodec * codec,
+ avifEncoder * encoder,
+ const avifImage * image,
+ avifBool alpha,
+ uint32_t addImageFlags,
+ avifCodecEncodeOutput * output)
{
- avifBool success = AVIF_FALSE;
- aom_codec_iface_t * encoder_interface = aom_codec_av1_cx();
- aom_codec_ctx_t aomEncoder;
-
- // Map encoder speed to AOM usage + CpuUsed:
- // Speed 0: GoodQuality CpuUsed 0
- // Speed 1: GoodQuality CpuUsed 1
- // Speed 2: GoodQuality CpuUsed 2
- // Speed 3: GoodQuality CpuUsed 3
- // Speed 4: GoodQuality CpuUsed 4
- // Speed 5: GoodQuality CpuUsed 5
- // Speed 6: GoodQuality CpuUsed 5
- // Speed 7: GoodQuality CpuUsed 5
- // Speed 8: RealTime CpuUsed 6
- // Speed 9: RealTime CpuUsed 7
- // Speed 10: RealTime CpuUsed 8
- unsigned int aomUsage = AOM_USAGE_GOOD_QUALITY;
- int aomCpuUsed = -1;
- if (encoder->speed != AVIF_SPEED_DEFAULT) {
- if (encoder->speed < 8) {
- aomUsage = AOM_USAGE_GOOD_QUALITY;
- aomCpuUsed = AVIF_CLAMP(encoder->speed, 0, 5);
- } else {
- aomUsage = AOM_USAGE_REALTIME;
- aomCpuUsed = AVIF_CLAMP(encoder->speed - 2, 6, 8);
+ if (!codec->internal->encoderInitialized) {
+ // Map encoder speed to AOM usage + CpuUsed:
+ // Speed 0: GoodQuality CpuUsed 0
+ // Speed 1: GoodQuality CpuUsed 1
+ // Speed 2: GoodQuality CpuUsed 2
+ // Speed 3: GoodQuality CpuUsed 3
+ // Speed 4: GoodQuality CpuUsed 4
+ // Speed 5: GoodQuality CpuUsed 5
+ // Speed 6: GoodQuality CpuUsed 6
+ // Speed 7: GoodQuality CpuUsed 6
+ // Speed 8: RealTime CpuUsed 6
+ // Speed 9: RealTime CpuUsed 7
+ // Speed 10: RealTime CpuUsed 8
+ unsigned int aomUsage = AOM_USAGE_GOOD_QUALITY;
+ int aomCpuUsed = -1;
+ if (encoder->speed != AVIF_SPEED_DEFAULT) {
+ if (encoder->speed < 8) {
+ aomUsage = AOM_USAGE_GOOD_QUALITY;
+ aomCpuUsed = AVIF_CLAMP(encoder->speed, 0, 6);
+ } else {
+ aomUsage = AOM_USAGE_REALTIME;
+ aomCpuUsed = AVIF_CLAMP(encoder->speed - 2, 6, 8);
+ }
}
- }
- int aomMajorVersion = aom_codec_version_major();
- if ((aomMajorVersion < 2) && (image->depth > 8)) {
- // Due to a known issue with libavif v1.0.0-errata1-avif, 10bpc and
- // 12bpc image encodes will call the wrong variant of
- // aom_subtract_block when cpu-used is 7 or 8, and crash. Until we get
- // a new tagged release from libaom with the fix and can verify we're
- // running with that version of libaom, we must avoid using
- // cpu-used=7/8 on any >8bpc image encodes.
- //
- // Context:
- // * https://github.com/AOMediaCodec/libavif/issues/49
- // * https://bugs.chromium.org/p/aomedia/issues/detail?id=2587
- //
- // Continued bug tracking here:
- // * https://github.com/AOMediaCodec/libavif/issues/56
-
- if (aomCpuUsed > 6) {
- aomCpuUsed = 6;
+ // aom_codec.h says: aom_codec_version() == (major<<16 | minor<<8 | patch)
+ static const int aomVersion_2_0_0 = (2 << 16);
+ const int aomVersion = aom_codec_version();
+ if ((aomVersion < aomVersion_2_0_0) && (image->depth > 8)) {
+        // Due to a known issue with libaom v1.0.0-errata1-avif, 10bpc and
+ // 12bpc image encodes will call the wrong variant of
+ // aom_subtract_block when cpu-used is 7 or 8, and crash. Until we get
+ // a new tagged release from libaom with the fix and can verify we're
+ // running with that version of libaom, we must avoid using
+ // cpu-used=7/8 on any >8bpc image encodes.
+ //
+ // Context:
+ // * https://github.com/AOMediaCodec/libavif/issues/49
+ // * https://bugs.chromium.org/p/aomedia/issues/detail?id=2587
+ //
+ // Continued bug tracking here:
+ // * https://github.com/AOMediaCodec/libavif/issues/56
+
+ if (aomCpuUsed > 6) {
+ aomCpuUsed = 6;
+ }
}
- }
- int yShift = 0;
- aom_img_fmt_t aomFormat = avifImageCalcAOMFmt(image, alpha, &yShift);
- if (aomFormat == AOM_IMG_FMT_NONE) {
- return AVIF_FALSE;
- }
+ codec->internal->aomFormat = avifImageCalcAOMFmt(image, alpha);
+ if (codec->internal->aomFormat == AOM_IMG_FMT_NONE) {
+ return AVIF_FALSE;
+ }
- avifPixelFormatInfo formatInfo;
- avifGetPixelFormatInfo(image->yuvFormat, &formatInfo);
-
- struct aom_codec_enc_cfg cfg;
- aom_codec_enc_config_default(encoder_interface, &cfg, aomUsage);
-
- cfg.g_profile = codec->configBox.seqProfile;
- cfg.g_bit_depth = image->depth;
- cfg.g_input_bit_depth = image->depth;
- cfg.g_w = image->width;
- cfg.g_h = image->height;
- if (encoder->maxThreads > 1) {
- cfg.g_threads = encoder->maxThreads;
- }
+ avifGetPixelFormatInfo(image->yuvFormat, &codec->internal->formatInfo);
+
+ aom_codec_iface_t * encoderInterface = aom_codec_av1_cx();
+ struct aom_codec_enc_cfg cfg;
+ aom_codec_enc_config_default(encoderInterface, &cfg, aomUsage);
+
+ cfg.g_profile = codec->configBox.seqProfile;
+ cfg.g_bit_depth = image->depth;
+ cfg.g_input_bit_depth = image->depth;
+ cfg.g_w = image->width;
+ cfg.g_h = image->height;
+ if (addImageFlags & AVIF_ADD_IMAGE_FLAG_SINGLE) {
+ // Set the maximum number of frames to encode to 1. This instructs
+ // libaom to set still_picture and reduced_still_picture_header to
+ // 1 in AV1 sequence headers.
+ cfg.g_limit = 1;
+ }
+ if (encoder->maxThreads > 1) {
+ cfg.g_threads = encoder->maxThreads;
+ }
- int minQuantizer = AVIF_CLAMP(encoder->minQuantizer, 0, 63);
- int maxQuantizer = AVIF_CLAMP(encoder->maxQuantizer, 0, 63);
- if (alpha) {
- minQuantizer = AVIF_CLAMP(encoder->minQuantizerAlpha, 0, 63);
- maxQuantizer = AVIF_CLAMP(encoder->maxQuantizerAlpha, 0, 63);
- }
- avifBool lossless = ((minQuantizer == AVIF_QUANTIZER_LOSSLESS) && (maxQuantizer == AVIF_QUANTIZER_LOSSLESS));
- cfg.rc_min_quantizer = minQuantizer;
- cfg.rc_max_quantizer = maxQuantizer;
+ int minQuantizer = AVIF_CLAMP(encoder->minQuantizer, 0, 63);
+ int maxQuantizer = AVIF_CLAMP(encoder->maxQuantizer, 0, 63);
+ if (alpha) {
+ minQuantizer = AVIF_CLAMP(encoder->minQuantizerAlpha, 0, 63);
+ maxQuantizer = AVIF_CLAMP(encoder->maxQuantizerAlpha, 0, 63);
+ }
+ avifBool lossless = ((minQuantizer == AVIF_QUANTIZER_LOSSLESS) && (maxQuantizer == AVIF_QUANTIZER_LOSSLESS));
+ cfg.rc_min_quantizer = minQuantizer;
+ cfg.rc_max_quantizer = maxQuantizer;
+
+ codec->internal->monochromeEnabled = AVIF_FALSE;
+ if (aomVersion > aomVersion_2_0_0) {
+ // There exists a bug in libaom's chroma_check() function where it will attempt to
+ // access nonexistent UV planes when encoding monochrome at faster libavif "speeds". It
+ // was fixed shortly after the 2.0.0 libaom release, and the fix exists in both the
+ // master and applejack branches. This ensures that the next version *after* 2.0.0 will
+ // have the fix, and we must avoid cfg.monochrome until then.
+ //
+ // Bugfix Change-Id: https://aomedia-review.googlesource.com/q/I26a39791f820b4d4e1d63ff7141f594c3c7181f5
+
+ if (alpha || (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400)) {
+ codec->internal->monochromeEnabled = AVIF_TRUE;
+ cfg.monochrome = 1;
+ }
+ }
- aom_codec_flags_t encoderFlags = 0;
- if (image->depth > 8) {
- encoderFlags |= AOM_CODEC_USE_HIGHBITDEPTH;
- }
- aom_codec_enc_init(&aomEncoder, encoder_interface, &cfg, encoderFlags);
+ aom_codec_flags_t encoderFlags = 0;
+ if (image->depth > 8) {
+ encoderFlags |= AOM_CODEC_USE_HIGHBITDEPTH;
+ }
+ aom_codec_enc_init(&codec->internal->encoder, encoderInterface, &cfg, encoderFlags);
+ codec->internal->encoderInitialized = AVIF_TRUE;
- if (lossless) {
- aom_codec_control(&aomEncoder, AV1E_SET_LOSSLESS, 1);
- }
- if (encoder->maxThreads > 1) {
- aom_codec_control(&aomEncoder, AV1E_SET_ROW_MT, 1);
- }
- if (encoder->tileRowsLog2 != 0) {
- int tileRowsLog2 = AVIF_CLAMP(encoder->tileRowsLog2, 0, 6);
- aom_codec_control(&aomEncoder, AV1E_SET_TILE_ROWS, tileRowsLog2);
- }
- if (encoder->tileColsLog2 != 0) {
- int tileColsLog2 = AVIF_CLAMP(encoder->tileColsLog2, 0, 6);
- aom_codec_control(&aomEncoder, AV1E_SET_TILE_COLUMNS, tileColsLog2);
- }
- if (aomCpuUsed != -1) {
- aom_codec_control(&aomEncoder, AOME_SET_CPUUSED, aomCpuUsed);
+ if (lossless) {
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_LOSSLESS, 1);
+ }
+ if (encoder->maxThreads > 1) {
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_ROW_MT, 1);
+ }
+ if (encoder->tileRowsLog2 != 0) {
+ int tileRowsLog2 = AVIF_CLAMP(encoder->tileRowsLog2, 0, 6);
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_TILE_ROWS, tileRowsLog2);
+ }
+ if (encoder->tileColsLog2 != 0) {
+ int tileColsLog2 = AVIF_CLAMP(encoder->tileColsLog2, 0, 6);
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_TILE_COLUMNS, tileColsLog2);
+ }
+ if (aomCpuUsed != -1) {
+ aom_codec_control(&codec->internal->encoder, AOME_SET_CPUUSED, aomCpuUsed);
+ }
}
+ int yShift = codec->internal->formatInfo.chromaShiftY;
uint32_t uvHeight = (image->height + yShift) >> yShift;
- aom_image_t * aomImage = aom_img_alloc(NULL, aomFormat, image->width, image->height, 16);
+ aom_image_t * aomImage = aom_img_alloc(NULL, codec->internal->aomFormat, image->width, image->height, 16);
+ avifBool monochromeRequested = AVIF_FALSE;
if (alpha) {
aomImage->range = (image->alphaRange == AVIF_RANGE_FULL) ? AOM_CR_FULL_RANGE : AOM_CR_STUDIO_RANGE;
- aom_codec_control(&aomEncoder, AV1E_SET_COLOR_RANGE, aomImage->range);
- aomImage->monochrome = 1;
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_COLOR_RANGE, aomImage->range);
+ monochromeRequested = AVIF_TRUE;
for (uint32_t j = 0; j < image->height; ++j) {
uint8_t * srcAlphaRow = &image->alphaPlane[j * image->alphaRowBytes];
uint8_t * dstAlphaRow = &aomImage->planes[0][j * aomImage->stride[0]];
memcpy(dstAlphaRow, srcAlphaRow, image->alphaRowBytes);
}
- // Zero out U and V
- memset(aomImage->planes[1], 0, aomImage->stride[1] * uvHeight);
- memset(aomImage->planes[2], 0, aomImage->stride[2] * uvHeight);
+ // Ignore UV planes when monochrome
} else {
aomImage->range = (image->yuvRange == AVIF_RANGE_FULL) ? AOM_CR_FULL_RANGE : AOM_CR_STUDIO_RANGE;
- aom_codec_control(&aomEncoder, AV1E_SET_COLOR_RANGE, aomImage->range);
- for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
- int aomPlaneIndex = yuvPlane;
- int planeHeight = image->height;
- if (yuvPlane == AVIF_CHAN_U) {
- aomPlaneIndex = formatInfo.aomIndexU;
- planeHeight = uvHeight;
- } else if (yuvPlane == AVIF_CHAN_V) {
- aomPlaneIndex = formatInfo.aomIndexV;
- planeHeight = uvHeight;
- }
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_COLOR_RANGE, aomImage->range);
+ int yuvPlaneCount = 3;
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
+ yuvPlaneCount = 1; // Ignore UV planes when monochrome
+ monochromeRequested = AVIF_TRUE;
+ }
+ for (int yuvPlane = 0; yuvPlane < yuvPlaneCount; ++yuvPlane) {
+ uint32_t planeHeight = (yuvPlane == AVIF_CHAN_Y) ? image->height : uvHeight;
- for (int j = 0; j < planeHeight; ++j) {
+ for (uint32_t j = 0; j < planeHeight; ++j) {
uint8_t * srcRow = &image->yuvPlanes[yuvPlane][j * image->yuvRowBytes[yuvPlane]];
- uint8_t * dstRow = &aomImage->planes[aomPlaneIndex][j * aomImage->stride[aomPlaneIndex]];
+ uint8_t * dstRow = &aomImage->planes[yuvPlane][j * aomImage->stride[yuvPlane]];
memcpy(dstRow, srcRow, image->yuvRowBytes[yuvPlane]);
}
}
@@ -364,35 +386,84 @@ static avifBool aomCodecEncodeImage(avifCodec * codec, avifImage * image, avifEn
aomImage->cp = (aom_color_primaries_t)image->colorPrimaries;
aomImage->tc = (aom_transfer_characteristics_t)image->transferCharacteristics;
aomImage->mc = (aom_matrix_coefficients_t)image->matrixCoefficients;
- aom_codec_control(&aomEncoder, AV1E_SET_COLOR_PRIMARIES, aomImage->cp);
- aom_codec_control(&aomEncoder, AV1E_SET_TRANSFER_CHARACTERISTICS, aomImage->tc);
- aom_codec_control(&aomEncoder, AV1E_SET_MATRIX_COEFFICIENTS, aomImage->mc);
+ aomImage->csp = (aom_chroma_sample_position_t)image->yuvChromaSamplePosition;
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_COLOR_PRIMARIES, aomImage->cp);
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_TRANSFER_CHARACTERISTICS, aomImage->tc);
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_MATRIX_COEFFICIENTS, aomImage->mc);
+ aom_codec_control(&codec->internal->encoder, AV1E_SET_CHROMA_SAMPLE_POSITION, aomImage->csp);
+ }
+
+ if (monochromeRequested && !codec->internal->monochromeEnabled) {
+ // The user requested monochrome (via alpha or YUV400) but libaom cannot currently support
+ // monochrome (see chroma_check comment above). Manually set UV planes to 0.5.
+
+ // aomImage is always 420 when we're monochrome
+ uint32_t monoUVWidth = (image->width + 1) >> 1;
+ uint32_t monoUVHeight = (image->height + 1) >> 1;
+
+ for (int yuvPlane = 1; yuvPlane < 3; ++yuvPlane) {
+ if (image->depth > 8) {
+ const uint16_t half = 1 << (image->depth - 1);
+ for (uint32_t j = 0; j < monoUVHeight; ++j) {
+ uint16_t * dstRow = (uint16_t *)&aomImage->planes[yuvPlane][j * aomImage->stride[yuvPlane]];
+ for (uint32_t i = 0; i < monoUVWidth; ++i) {
+ dstRow[i] = half;
+ }
+ }
+ } else {
+ const uint8_t half = 128;
+ size_t planeSize = (size_t)monoUVHeight * aomImage->stride[yuvPlane];
+ memset(aomImage->planes[yuvPlane], half, planeSize);
+ }
+ }
}
- aom_codec_encode(&aomEncoder, aomImage, 0, 1, 0);
+ aom_enc_frame_flags_t encodeFlags = 0;
+ if (addImageFlags & AVIF_ADD_IMAGE_FLAG_FORCE_KEYFRAME) {
+ encodeFlags |= AOM_EFLAG_FORCE_KF;
+ }
+ aom_codec_encode(&codec->internal->encoder, aomImage, 0, 1, encodeFlags);
- avifBool flushed = AVIF_FALSE;
aom_codec_iter_t iter = NULL;
for (;;) {
- const aom_codec_cx_pkt_t * pkt = aom_codec_get_cx_data(&aomEncoder, &iter);
+ const aom_codec_cx_pkt_t * pkt = aom_codec_get_cx_data(&codec->internal->encoder, &iter);
if (pkt == NULL) {
- if (flushed)
- break;
-
- aom_codec_encode(&aomEncoder, NULL, 0, 1, 0); // flush
- flushed = AVIF_TRUE;
- continue;
+ break;
}
if (pkt->kind == AOM_CODEC_CX_FRAME_PKT) {
- avifRWDataSet(obu, pkt->data.frame.buf, pkt->data.frame.sz);
- success = AVIF_TRUE;
- break;
+ avifCodecEncodeOutputAddSample(output, pkt->data.frame.buf, pkt->data.frame.sz, (pkt->data.frame.flags & AOM_FRAME_IS_KEY));
}
}
aom_img_free(aomImage);
- aom_codec_destroy(&aomEncoder);
- return success;
+ return AVIF_TRUE;
+}
+
+static avifBool aomCodecEncodeFinish(avifCodec * codec, avifCodecEncodeOutput * output)
+{
+ for (;;) {
+ // flush encoder
+ aom_codec_encode(&codec->internal->encoder, NULL, 0, 1, 0);
+
+ avifBool gotPacket = AVIF_FALSE;
+ aom_codec_iter_t iter = NULL;
+ for (;;) {
+ const aom_codec_cx_pkt_t * pkt = aom_codec_get_cx_data(&codec->internal->encoder, &iter);
+ if (pkt == NULL) {
+ break;
+ }
+ if (pkt->kind == AOM_CODEC_CX_FRAME_PKT) {
+ gotPacket = AVIF_TRUE;
+ avifCodecEncodeOutputAddSample(
+ output, pkt->data.frame.buf, pkt->data.frame.sz, (pkt->data.frame.flags & AOM_FRAME_IS_KEY));
+ }
+ }
+
+ if (!gotPacket) {
+ break;
+ }
+ }
+ return AVIF_TRUE;
}
const char * avifCodecVersionAOM(void)
@@ -407,6 +478,7 @@ avifCodec * avifCodecCreateAOM(void)
codec->open = aomCodecOpen;
codec->getNextImage = aomCodecGetNextImage;
codec->encodeImage = aomCodecEncodeImage;
+ codec->encodeFinish = aomCodecEncodeFinish;
codec->destroyInternal = aomCodecDestroyInternal;
codec->internal = (struct avifCodecInternal *)avifAlloc(sizeof(struct avifCodecInternal));
diff --git a/chromium/third_party/libavif/src/src/codec_dav1d.c b/chromium/third_party/libavif/src/src/codec_dav1d.c
index a20a377a9a9..b1c9c4b279a 100644
--- a/chromium/third_party/libavif/src/src/codec_dav1d.c
+++ b/chromium/third_party/libavif/src/src/codec_dav1d.c
@@ -52,7 +52,7 @@ static avifBool dav1dFeedData(avifCodec * codec)
{
if (!codec->internal->dav1dData.sz) {
if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
- avifSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
+ avifDecodeSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
++codec->internal->inputSampleIndex;
if (dav1d_data_wrap(&codec->internal->dav1dData, sample->data.data, sample->data.size, avifDav1dFreeCallback, NULL) != 0) {
@@ -126,6 +126,8 @@ static avifBool dav1dCodecGetNextImage(avifCodec * codec, avifImage * image)
avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
switch (dav1dImage->p.layout) {
case DAV1D_PIXEL_LAYOUT_I400:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
+ break;
case DAV1D_PIXEL_LAYOUT_I420:
yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
break;
@@ -159,11 +161,12 @@ static avifBool dav1dCodecGetNextImage(avifCodec * codec, avifImage * image)
avifGetPixelFormatInfo(yuvFormat, &formatInfo);
avifImageFreePlanes(image, AVIF_PLANES_YUV);
- for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ int yuvPlaneCount = (yuvFormat == AVIF_PIXEL_FORMAT_YUV400) ? 1 : 3;
+ for (int yuvPlane = 0; yuvPlane < yuvPlaneCount; ++yuvPlane) {
image->yuvPlanes[yuvPlane] = dav1dImage->data[yuvPlane];
image->yuvRowBytes[yuvPlane] = (uint32_t)dav1dImage->stride[(yuvPlane == AVIF_CHAN_Y) ? 0 : 1];
}
- image->decoderOwnsYUVPlanes = AVIF_TRUE;
+ image->imageOwnsYUVPlanes = AVIF_FALSE;
} else {
// Alpha plane - ensure image is correct size, fill color
@@ -182,7 +185,7 @@ static avifBool dav1dCodecGetNextImage(avifCodec * codec, avifImage * image)
image->alphaPlane = dav1dImage->data[0];
image->alphaRowBytes = (uint32_t)dav1dImage->stride[0];
image->alphaRange = codec->internal->colorRange;
- image->decoderOwnsAlphaPlane = AVIF_TRUE;
+ image->imageOwnsAlphaPlane = AVIF_FALSE;
}
return AVIF_TRUE;
}
@@ -204,6 +207,6 @@ avifCodec * avifCodecCreateDav1d(void)
memset(codec->internal, 0, sizeof(struct avifCodecInternal));
dav1d_default_settings(&codec->internal->dav1dSettings);
// Set a maximum frame size limit to avoid OOM'ing fuzzers.
- codec->internal->dav1dSettings.frame_size_limit = 16384 * 16384;
+ codec->internal->dav1dSettings.frame_size_limit = AVIF_MAX_IMAGE_SIZE;
return codec;
}
diff --git a/chromium/third_party/libavif/src/src/codec_libgav1.c b/chromium/third_party/libavif/src/src/codec_libgav1.c
index d7260c3d8a8..cb020caa40e 100644
--- a/chromium/third_party/libavif/src/src/codec_libgav1.c
+++ b/chromium/third_party/libavif/src/src/codec_libgav1.c
@@ -42,7 +42,7 @@ static avifBool gav1CodecGetNextImage(avifCodec * codec, avifImage * image)
// Check if there are more samples to feed
if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
// Feed another sample
- avifSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
+ avifDecodeSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
++codec->internal->inputSampleIndex;
if (Libgav1DecoderEnqueueFrame(codec->internal->gav1Decoder,
sample->data.data,
@@ -80,6 +80,8 @@ static avifBool gav1CodecGetNextImage(avifCodec * codec, avifImage * image)
avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
switch (gav1Image->image_format) {
case kLibgav1ImageFormatMonochrome400:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
+ break;
case kLibgav1ImageFormatYuv420:
yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
break;
@@ -115,11 +117,12 @@ static avifBool gav1CodecGetNextImage(avifCodec * codec, avifImage * image)
// Steal the pointers from the decoder's image directly
avifImageFreePlanes(image, AVIF_PLANES_YUV);
- for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ int yuvPlaneCount = (yuvFormat == AVIF_PIXEL_FORMAT_YUV400) ? 1 : 3;
+ for (int yuvPlane = 0; yuvPlane < yuvPlaneCount; ++yuvPlane) {
image->yuvPlanes[yuvPlane] = gav1Image->plane[yuvPlane];
image->yuvRowBytes[yuvPlane] = gav1Image->stride[yuvPlane];
}
- image->decoderOwnsYUVPlanes = AVIF_TRUE;
+ image->imageOwnsYUVPlanes = AVIF_FALSE;
} else {
// Alpha plane - ensure image is correct size, fill color
@@ -138,7 +141,7 @@ static avifBool gav1CodecGetNextImage(avifCodec * codec, avifImage * image)
image->alphaPlane = gav1Image->plane[0];
image->alphaRowBytes = gav1Image->stride[0];
image->alphaRange = codec->internal->colorRange;
- image->decoderOwnsAlphaPlane = AVIF_TRUE;
+ image->imageOwnsAlphaPlane = AVIF_FALSE;
}
return AVIF_TRUE;
diff --git a/chromium/third_party/libavif/src/src/codec_rav1e.c b/chromium/third_party/libavif/src/src/codec_rav1e.c
index 5c8b332b72f..6a7fb226f8e 100644
--- a/chromium/third_party/libavif/src/src/codec_rav1e.c
+++ b/chromium/third_party/libavif/src/src/codec_rav1e.c
@@ -9,162 +9,212 @@
struct avifCodecInternal
{
- uint32_t unused; // rav1e codec has no state
+ RaContext * rav1eContext;
+ RaChromaSampling chromaSampling;
+ int yShift;
};
static void rav1eCodecDestroyInternal(avifCodec * codec)
{
+ if (codec->internal->rav1eContext) {
+ rav1e_context_unref(codec->internal->rav1eContext);
+ codec->internal->rav1eContext = NULL;
+ }
avifFree(codec->internal);
}
static avifBool rav1eCodecOpen(struct avifCodec * codec, uint32_t firstSampleIndex)
{
(void)firstSampleIndex; // Codec is encode-only, this isn't used
- (void)codec;
+
+ codec->internal->rav1eContext = NULL;
return AVIF_TRUE;
}
-static avifBool rav1eCodecEncodeImage(avifCodec * codec, avifImage * image, avifEncoder * encoder, avifRWData * obu, avifBool alpha)
+// Official support wasn't added until v0.4.0
+static avifBool rav1eSupports400(void)
{
- (void)codec; // unused
+ const char * rav1eVersionString = rav1e_version_short();
+
+ // Check major version > 0
+ int majorVersion = atoi(rav1eVersionString);
+ if (majorVersion > 0) {
+ return AVIF_TRUE;
+ }
+ // Check minor version >= 4
+ const char * minorVersionString = strchr(rav1eVersionString, '.');
+ if (!minorVersionString) {
+ return AVIF_FALSE;
+ }
+ ++minorVersionString;
+ if (!(*minorVersionString)) {
+ return AVIF_FALSE;
+ }
+ int minorVersion = atoi(minorVersionString);
+ return minorVersion >= 4;
+}
+
+static avifBool rav1eCodecEncodeImage(avifCodec * codec,
+ avifEncoder * encoder,
+ const avifImage * image,
+ avifBool alpha,
+ uint32_t addImageFlags,
+ avifCodecEncodeOutput * output)
+{
avifBool success = AVIF_FALSE;
RaConfig * rav1eConfig = NULL;
- RaContext * rav1eContext = NULL;
RaFrame * rav1eFrame = NULL;
- RaPacket * pkt = NULL;
- int yShift = 0;
- RaChromaSampling chromaSampling;
- RaPixelRange rav1eRange;
- if (alpha) {
- rav1eRange = (image->alphaRange == AVIF_RANGE_FULL) ? RA_PIXEL_RANGE_FULL : RA_PIXEL_RANGE_LIMITED;
- chromaSampling = RA_CHROMA_SAMPLING_CS422; // I can't seem to get RA_CHROMA_SAMPLING_CS400 to work right now, unfortunately
- } else {
- rav1eRange = (image->yuvRange == AVIF_RANGE_FULL) ? RA_PIXEL_RANGE_FULL : RA_PIXEL_RANGE_LIMITED;
- switch (image->yuvFormat) {
- case AVIF_PIXEL_FORMAT_YUV444:
- chromaSampling = RA_CHROMA_SAMPLING_CS444;
- break;
- case AVIF_PIXEL_FORMAT_YUV422:
- chromaSampling = RA_CHROMA_SAMPLING_CS422;
- break;
- case AVIF_PIXEL_FORMAT_YUV420:
- chromaSampling = RA_CHROMA_SAMPLING_CS420;
- yShift = 1;
- break;
- case AVIF_PIXEL_FORMAT_YV12:
- case AVIF_PIXEL_FORMAT_NONE:
- default:
- return AVIF_FALSE;
+ if (!codec->internal->rav1eContext) {
+ const avifBool supports400 = rav1eSupports400();
+ RaPixelRange rav1eRange;
+ if (alpha) {
+ rav1eRange = (image->alphaRange == AVIF_RANGE_FULL) ? RA_PIXEL_RANGE_FULL : RA_PIXEL_RANGE_LIMITED;
+ codec->internal->chromaSampling = supports400 ? RA_CHROMA_SAMPLING_CS400 : RA_CHROMA_SAMPLING_CS420;
+ codec->internal->yShift = 1;
+ } else {
+ rav1eRange = (image->yuvRange == AVIF_RANGE_FULL) ? RA_PIXEL_RANGE_FULL : RA_PIXEL_RANGE_LIMITED;
+ codec->internal->yShift = 0;
+ switch (image->yuvFormat) {
+ case AVIF_PIXEL_FORMAT_YUV444:
+ codec->internal->chromaSampling = RA_CHROMA_SAMPLING_CS444;
+ break;
+ case AVIF_PIXEL_FORMAT_YUV422:
+ codec->internal->chromaSampling = RA_CHROMA_SAMPLING_CS422;
+ break;
+ case AVIF_PIXEL_FORMAT_YUV420:
+ codec->internal->chromaSampling = RA_CHROMA_SAMPLING_CS420;
+ codec->internal->yShift = 1;
+ break;
+ case AVIF_PIXEL_FORMAT_YUV400:
+ codec->internal->chromaSampling = supports400 ? RA_CHROMA_SAMPLING_CS400 : RA_CHROMA_SAMPLING_CS420;
+ codec->internal->yShift = 1;
+ break;
+ case AVIF_PIXEL_FORMAT_NONE:
+ default:
+ return AVIF_FALSE;
+ }
}
- }
- rav1eConfig = rav1e_config_default();
- if (rav1e_config_set_pixel_format(
- rav1eConfig, (uint8_t)image->depth, chromaSampling, RA_CHROMA_SAMPLE_POSITION_UNKNOWN, rav1eRange) < 0) {
- goto cleanup;
- }
-
- if (rav1e_config_parse(rav1eConfig, "still_picture", "true") == -1) {
- goto cleanup;
- }
- if (rav1e_config_parse_int(rav1eConfig, "width", image->width) == -1) {
- goto cleanup;
- }
- if (rav1e_config_parse_int(rav1eConfig, "height", image->height) == -1) {
- goto cleanup;
- }
- if (rav1e_config_parse_int(rav1eConfig, "threads", encoder->maxThreads) == -1) {
- goto cleanup;
- }
+ rav1eConfig = rav1e_config_default();
+ if (rav1e_config_set_pixel_format(rav1eConfig,
+ (uint8_t)image->depth,
+ codec->internal->chromaSampling,
+ (RaChromaSamplePosition)image->yuvChromaSamplePosition,
+ rav1eRange) < 0) {
+ goto cleanup;
+ }
- int minQuantizer = AVIF_CLAMP(encoder->minQuantizer, 0, 63);
- int maxQuantizer = AVIF_CLAMP(encoder->maxQuantizer, 0, 63);
- if (alpha) {
- minQuantizer = AVIF_CLAMP(encoder->minQuantizerAlpha, 0, 63);
- maxQuantizer = AVIF_CLAMP(encoder->maxQuantizerAlpha, 0, 63);
- }
- minQuantizer = (minQuantizer * 255) / 63; // Rescale quantizer values as rav1e's QP range is [0,255]
- maxQuantizer = (maxQuantizer * 255) / 63;
- if (rav1e_config_parse_int(rav1eConfig, "min_quantizer", minQuantizer) == -1) {
- goto cleanup;
- }
- if (rav1e_config_parse_int(rav1eConfig, "quantizer", maxQuantizer) == -1) {
- goto cleanup;
- }
- if (encoder->tileRowsLog2 != 0) {
- int tileRowsLog2 = AVIF_CLAMP(encoder->tileRowsLog2, 0, 6);
- if (rav1e_config_parse_int(rav1eConfig, "tile_rows", 1 << tileRowsLog2) == -1) {
+ if (addImageFlags & AVIF_ADD_IMAGE_FLAG_SINGLE) {
+ if (rav1e_config_parse(rav1eConfig, "still_picture", "true") == -1) {
+ goto cleanup;
+ }
+ }
+ if (rav1e_config_parse_int(rav1eConfig, "width", image->width) == -1) {
goto cleanup;
}
- }
- if (encoder->tileColsLog2 != 0) {
- int tileColsLog2 = AVIF_CLAMP(encoder->tileColsLog2, 0, 6);
- if (rav1e_config_parse_int(rav1eConfig, "tile_cols", 1 << tileColsLog2) == -1) {
+ if (rav1e_config_parse_int(rav1eConfig, "height", image->height) == -1) {
goto cleanup;
}
- }
- if (encoder->speed != AVIF_SPEED_DEFAULT) {
- int speed = AVIF_CLAMP(encoder->speed, 0, 10);
- if (rav1e_config_parse_int(rav1eConfig, "speed", speed) == -1) {
+ if (rav1e_config_parse_int(rav1eConfig, "threads", encoder->maxThreads) == -1) {
goto cleanup;
}
- }
- rav1e_config_set_color_description(rav1eConfig,
- (RaMatrixCoefficients)image->matrixCoefficients,
- (RaColorPrimaries)image->colorPrimaries,
- (RaTransferCharacteristics)image->transferCharacteristics);
+ int minQuantizer = AVIF_CLAMP(encoder->minQuantizer, 0, 63);
+ int maxQuantizer = AVIF_CLAMP(encoder->maxQuantizer, 0, 63);
+ if (alpha) {
+ minQuantizer = AVIF_CLAMP(encoder->minQuantizerAlpha, 0, 63);
+ maxQuantizer = AVIF_CLAMP(encoder->maxQuantizerAlpha, 0, 63);
+ }
+ minQuantizer = (minQuantizer * 255) / 63; // Rescale quantizer values as rav1e's QP range is [0,255]
+ maxQuantizer = (maxQuantizer * 255) / 63;
+ if (rav1e_config_parse_int(rav1eConfig, "min_quantizer", minQuantizer) == -1) {
+ goto cleanup;
+ }
+ if (rav1e_config_parse_int(rav1eConfig, "quantizer", maxQuantizer) == -1) {
+ goto cleanup;
+ }
+ if (encoder->tileRowsLog2 != 0) {
+ int tileRowsLog2 = AVIF_CLAMP(encoder->tileRowsLog2, 0, 6);
+ if (rav1e_config_parse_int(rav1eConfig, "tile_rows", 1 << tileRowsLog2) == -1) {
+ goto cleanup;
+ }
+ }
+ if (encoder->tileColsLog2 != 0) {
+ int tileColsLog2 = AVIF_CLAMP(encoder->tileColsLog2, 0, 6);
+ if (rav1e_config_parse_int(rav1eConfig, "tile_cols", 1 << tileColsLog2) == -1) {
+ goto cleanup;
+ }
+ }
+ if (encoder->speed != AVIF_SPEED_DEFAULT) {
+ int speed = AVIF_CLAMP(encoder->speed, 0, 10);
+ if (rav1e_config_parse_int(rav1eConfig, "speed", speed) == -1) {
+ goto cleanup;
+ }
+ }
- rav1eContext = rav1e_context_new(rav1eConfig);
- if (!rav1eContext) {
- goto cleanup;
+ rav1e_config_set_color_description(rav1eConfig,
+ (RaMatrixCoefficients)image->matrixCoefficients,
+ (RaColorPrimaries)image->colorPrimaries,
+ (RaTransferCharacteristics)image->transferCharacteristics);
+
+ codec->internal->rav1eContext = rav1e_context_new(rav1eConfig);
+ if (!codec->internal->rav1eContext) {
+ goto cleanup;
+ }
}
- rav1eFrame = rav1e_frame_new(rav1eContext);
+
+ rav1eFrame = rav1e_frame_new(codec->internal->rav1eContext);
int byteWidth = (image->depth > 8) ? 2 : 1;
if (alpha) {
rav1e_frame_fill_plane(rav1eFrame, 0, image->alphaPlane, image->alphaRowBytes * image->height, image->alphaRowBytes, byteWidth);
} else {
- uint32_t uvHeight = (image->height + yShift) >> yShift;
rav1e_frame_fill_plane(rav1eFrame, 0, image->yuvPlanes[0], image->yuvRowBytes[0] * image->height, image->yuvRowBytes[0], byteWidth);
- rav1e_frame_fill_plane(rav1eFrame, 1, image->yuvPlanes[1], image->yuvRowBytes[1] * uvHeight, image->yuvRowBytes[1], byteWidth);
- rav1e_frame_fill_plane(rav1eFrame, 2, image->yuvPlanes[2], image->yuvRowBytes[2] * uvHeight, image->yuvRowBytes[2], byteWidth);
+ if (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV400) {
+ uint32_t uvHeight = (image->height + codec->internal->yShift) >> codec->internal->yShift;
+ rav1e_frame_fill_plane(rav1eFrame, 1, image->yuvPlanes[1], image->yuvRowBytes[1] * uvHeight, image->yuvRowBytes[1], byteWidth);
+ rav1e_frame_fill_plane(rav1eFrame, 2, image->yuvPlanes[2], image->yuvRowBytes[2] * uvHeight, image->yuvRowBytes[2], byteWidth);
+ }
}
- RaEncoderStatus encoderStatus = rav1e_send_frame(rav1eContext, rav1eFrame);
- if (encoderStatus != 0) {
- goto cleanup;
- }
- encoderStatus = rav1e_send_frame(rav1eContext, NULL); // flush
- if (encoderStatus != 0) {
- goto cleanup;
+ RaFrameTypeOverride frameType = RA_FRAME_TYPE_OVERRIDE_NO;
+ if (addImageFlags & AVIF_ADD_IMAGE_FLAG_FORCE_KEYFRAME) {
+ frameType = RA_FRAME_TYPE_OVERRIDE_KEY;
}
+ rav1e_frame_set_type(rav1eFrame, frameType);
- encoderStatus = rav1e_receive_packet(rav1eContext, &pkt);
- if (encoderStatus != 0) {
+ RaEncoderStatus encoderStatus = rav1e_send_frame(codec->internal->rav1eContext, rav1eFrame);
+ if (encoderStatus != RA_ENCODER_STATUS_SUCCESS) {
goto cleanup;
}
- if (pkt && pkt->data && (pkt->len > 0)) {
- avifRWDataSet(obu, pkt->data, pkt->len);
- success = AVIF_TRUE;
+ RaPacket * pkt = NULL;
+ for (;;) {
+ encoderStatus = rav1e_receive_packet(codec->internal->rav1eContext, &pkt);
+ if (encoderStatus == RA_ENCODER_STATUS_ENCODED) {
+ continue;
+ }
+ if ((encoderStatus != RA_ENCODER_STATUS_SUCCESS) && (encoderStatus != RA_ENCODER_STATUS_NEED_MORE_DATA)) {
+ goto cleanup;
+ } else if (pkt) {
+ if (pkt->data && (pkt->len > 0)) {
+ avifCodecEncodeOutputAddSample(output, pkt->data, pkt->len, (pkt->frame_type == RA_FRAME_TYPE_KEY));
+ }
+ rav1e_packet_unref(pkt);
+ pkt = NULL;
+ } else {
+ break;
+ }
}
+ success = AVIF_TRUE;
cleanup:
- if (pkt) {
- rav1e_packet_unref(pkt);
- pkt = NULL;
- }
if (rav1eFrame) {
rav1e_frame_unref(rav1eFrame);
rav1eFrame = NULL;
}
- if (rav1eContext) {
- rav1e_context_unref(rav1eContext);
- rav1eContext = NULL;
- }
if (rav1eConfig) {
rav1e_config_unref(rav1eConfig);
rav1eConfig = NULL;
@@ -172,6 +222,43 @@ cleanup:
return success;
}
+static avifBool rav1eCodecEncodeFinish(avifCodec * codec, avifCodecEncodeOutput * output)
+{
+ for (;;) {
+ RaEncoderStatus encoderStatus = rav1e_send_frame(codec->internal->rav1eContext, NULL); // flush
+ if (encoderStatus != RA_ENCODER_STATUS_SUCCESS) {
+ return AVIF_FALSE;
+ }
+
+ avifBool gotPacket = AVIF_FALSE;
+ RaPacket * pkt = NULL;
+ for (;;) {
+ encoderStatus = rav1e_receive_packet(codec->internal->rav1eContext, &pkt);
+ if (encoderStatus == RA_ENCODER_STATUS_ENCODED) {
+ continue;
+ }
+ if ((encoderStatus != RA_ENCODER_STATUS_SUCCESS) && (encoderStatus != RA_ENCODER_STATUS_LIMIT_REACHED)) {
+ return AVIF_FALSE;
+ }
+ if (pkt) {
+ gotPacket = AVIF_TRUE;
+ if (pkt->data && (pkt->len > 0)) {
+ avifCodecEncodeOutputAddSample(output, pkt->data, pkt->len, (pkt->frame_type == RA_FRAME_TYPE_KEY));
+ }
+ rav1e_packet_unref(pkt);
+ pkt = NULL;
+ } else {
+ break;
+ }
+ }
+
+ if (!gotPacket) {
+ break;
+ }
+ }
+ return AVIF_TRUE;
+}
+
const char * avifCodecVersionRav1e(void)
{
return rav1e_version_full();
@@ -183,6 +270,7 @@ avifCodec * avifCodecCreateRav1e(void)
memset(codec, 0, sizeof(struct avifCodec));
codec->open = rav1eCodecOpen;
codec->encodeImage = rav1eCodecEncodeImage;
+ codec->encodeFinish = rav1eCodecEncodeFinish;
codec->destroyInternal = rav1eCodecDestroyInternal;
codec->internal = (struct avifCodecInternal *)avifAlloc(sizeof(struct avifCodecInternal));
diff --git a/chromium/third_party/libavif/src/src/colr.c b/chromium/third_party/libavif/src/src/colr.c
index 32862cde8ef..d09f485a691 100644
--- a/chromium/third_party/libavif/src/src/colr.c
+++ b/chromium/third_party/libavif/src/src/colr.c
@@ -52,7 +52,7 @@ static avifBool primariesMatch(const float p1[8], const float p2[8])
matchesTo3RoundedPlaces(p1[5], p2[5]) && matchesTo3RoundedPlaces(p1[6], p2[6]) && matchesTo3RoundedPlaces(p1[7], p2[7]);
}
-avifColorPrimaries avifColorPrimariesFind(float inPrimaries[8], const char ** outName)
+avifColorPrimaries avifColorPrimariesFind(const float inPrimaries[8], const char ** outName)
{
if (outName) {
*outName = NULL;
@@ -94,7 +94,7 @@ static const struct avifMatrixCoefficientsTable matrixCoefficientsTables[] = {
static const int avifMatrixCoefficientsTableSize = sizeof(matrixCoefficientsTables) / sizeof(matrixCoefficientsTables[0]);
-static avifBool calcYUVInfoFromCICP(avifImage * image, float coeffs[3])
+static avifBool calcYUVInfoFromCICP(const avifImage * image, float coeffs[3])
{
if (image->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_CHROMA_DERIVED_NCL) {
float primaries[8];
@@ -135,7 +135,7 @@ static avifBool calcYUVInfoFromCICP(avifImage * image, float coeffs[3])
return AVIF_FALSE;
}
-void avifCalcYUVCoefficients(avifImage * image, float * outR, float * outG, float * outB)
+void avifCalcYUVCoefficients(const avifImage * image, float * outR, float * outG, float * outB)
{
// sRGB (BT.709) defaults, as explained here:
//
diff --git a/chromium/third_party/libavif/src/src/mem.c b/chromium/third_party/libavif/src/src/mem.c
index 4ba8a8e5a75..5022d26c485 100644
--- a/chromium/third_party/libavif/src/src/mem.c
+++ b/chromium/third_party/libavif/src/src/mem.c
@@ -7,7 +7,11 @@
void * avifAlloc(size_t size)
{
- return malloc(size);
+ void * out = malloc(size);
+ if (out == NULL) {
+ abort();
+ }
+ return out;
}
void avifFree(void * p)
diff --git a/chromium/third_party/libavif/src/src/obu.c b/chromium/third_party/libavif/src/src/obu.c
new file mode 100644
index 00000000000..d168f141aef
--- /dev/null
+++ b/chromium/third_party/libavif/src/src/obu.c
@@ -0,0 +1,360 @@
+/*
+ * Copyright © 2018, VideoLAN and dav1d authors
+ * Copyright © 2018, Two Orioles, LLC
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// OBU parsing and bit magic all originally from dav1d's obu.c and getbits.c,
+// but heavily modified/reduced down to simply find the Sequence Header OBU
+// and pull a few interesting pieces from it.
+//
+// Any other code in here is under this license:
+//
+// Copyright 2020 Joe Drago. All rights reserved.
+// SPDX-License-Identifier: BSD-2-Clause
+
+#include "avif/internal.h"
+
+#include <string.h>
+
+// ---------------------------------------------------------------------------
+// avifBits - Originally dav1d's GetBits struct (see dav1d's getbits.c)
+
+typedef struct avifBits
+{
+ int error, eof;
+ uint64_t state;
+ uint32_t bitsLeft;
+ const uint8_t *ptr, *start, *end;
+} avifBits;
+
+static inline uint32_t avifBitsReadPos(const avifBits * bits)
+{
+ return (uint32_t)(bits->ptr - bits->start) * 8 - bits->bitsLeft;
+}
+
+static void avifBitsInit(avifBits * const bits, const uint8_t * const data, const size_t size)
+{
+ bits->ptr = bits->start = data;
+ bits->end = &bits->start[size];
+ bits->bitsLeft = 0;
+ bits->state = 0;
+ bits->error = 0;
+ bits->eof = 0;
+}
+
+static void avifBitsRefill(avifBits * const bits, const uint32_t n)
+{
+ uint64_t state = 0;
+ do {
+ state <<= 8;
+ bits->bitsLeft += 8;
+ if (!bits->eof)
+ state |= *bits->ptr++;
+ if (bits->ptr >= bits->end) {
+ bits->error = bits->eof;
+ bits->eof = 1;
+ }
+ } while (n > bits->bitsLeft);
+ bits->state |= state << (64 - bits->bitsLeft);
+}
+
+static uint32_t avifBitsRead(avifBits * const bits, const uint32_t n)
+{
+ if (n > bits->bitsLeft)
+ avifBitsRefill(bits, n);
+
+ const uint64_t state = bits->state;
+ bits->bitsLeft -= n;
+ bits->state <<= n;
+
+ return (uint32_t)(state >> (64 - n));
+}
+
+static uint32_t avifBitsReadUleb128(avifBits * bits)
+{
+ uint64_t val = 0;
+ uint32_t more;
+ uint32_t i = 0;
+
+ do {
+ const uint32_t v = avifBitsRead(bits, 8);
+ more = v & 0x80;
+ val |= ((uint64_t)(v & 0x7F)) << i;
+ i += 7;
+ } while (more && i < 56);
+
+ if (val > UINT32_MAX || more) {
+ bits->error = 1;
+ return 0;
+ }
+
+ return (uint32_t)val;
+}
+
+static uint32_t avifBitsReadVLC(avifBits * const bits)
+{
+ int numBits = 0;
+ while (!avifBitsRead(bits, 1))
+ if (++numBits == 32)
+ return 0xFFFFFFFFU;
+ return numBits ? ((1U << numBits) - 1) + avifBitsRead(bits, numBits) : 0;
+}
+
+// ---------------------------------------------------------------------------
+// Variables in here use snake_case to self-document from the AV1 spec:
+//
+// https://aomediacodec.github.io/av1-spec/av1-spec.pdf
+
+// Originally dav1d's parse_seq_hdr() function (heavily modified)
+static avifBool parseSequenceHeader(avifBits * bits, avifSequenceHeader * header)
+{
+ uint32_t seq_profile = avifBitsRead(bits, 3);
+ if (seq_profile > 2) {
+ return AVIF_FALSE;
+ }
+
+ uint32_t still_picture = avifBitsRead(bits, 1);
+ uint32_t reduced_still_picture_header = avifBitsRead(bits, 1);
+ if (reduced_still_picture_header && !still_picture) {
+ return AVIF_FALSE;
+ }
+
+ if (reduced_still_picture_header) {
+ avifBitsRead(bits, 5); // seq_level_idx
+ } else {
+ uint32_t timing_info_present_flag = avifBitsRead(bits, 1);
+ uint32_t decoder_model_info_present_flag = 0;
+ uint32_t buffer_delay_length = 0;
+ if (timing_info_present_flag) { // timing_info()
+ avifBitsRead(bits, 32); // num_units_in_display_tick
+ avifBitsRead(bits, 32); // time_scale
+ uint32_t equal_picture_interval = avifBitsRead(bits, 1);
+ if (equal_picture_interval) {
+ uint32_t num_ticks_per_picture_minus_1 = avifBitsReadVLC(bits);
+ if (num_ticks_per_picture_minus_1 == 0xFFFFFFFFU)
+ return AVIF_FALSE;
+ }
+
+ decoder_model_info_present_flag = avifBitsRead(bits, 1);
+ if (decoder_model_info_present_flag) { // decoder_model_info()
+ buffer_delay_length = avifBitsRead(bits, 5) + 1;
+ avifBitsRead(bits, 32); // num_units_in_decoding_tick
+ avifBitsRead(bits, 10); // buffer_removal_time_length_minus_1, frame_presentation_time_length_minus_1
+ }
+ }
+
+ uint32_t initial_display_delay_present_flag = avifBitsRead(bits, 1);
+ uint32_t operating_points_cnt = avifBitsRead(bits, 5) + 1;
+ for (uint32_t i = 0; i < operating_points_cnt; i++) {
+ avifBitsRead(bits, 12); // operating_point_idc
+ uint32_t seq_level_idx = avifBitsRead(bits, 5);
+ if (seq_level_idx > 7) {
+ avifBitsRead(bits, 1); // seq_tier
+ }
+ if (decoder_model_info_present_flag) {
+ uint32_t decoder_model_present_for_this_op = avifBitsRead(bits, 1);
+ if (decoder_model_present_for_this_op) { // operating_parameters_info()
+ avifBitsRead(bits, buffer_delay_length); // decoder_buffer_delay
+ avifBitsRead(bits, buffer_delay_length); // encoder_buffer_delay
+ avifBitsRead(bits, 1); // low_delay_mode_flag
+ }
+ }
+ if (initial_display_delay_present_flag) {
+ uint32_t initial_display_delay_present_for_this_op = avifBitsRead(bits, 1);
+ if (initial_display_delay_present_for_this_op) {
+ avifBitsRead(bits, 4); // initial_display_delay_minus_1
+ }
+ }
+ }
+ }
+
+ uint32_t frame_width_bits = avifBitsRead(bits, 4) + 1;
+ uint32_t frame_height_bits = avifBitsRead(bits, 4) + 1;
+ header->maxWidth = avifBitsRead(bits, frame_width_bits) + 1; // max_frame_width
+ header->maxHeight = avifBitsRead(bits, frame_height_bits) + 1; // max_frame_height
+ uint32_t frame_id_numbers_present_flag = 0;
+ if (!reduced_still_picture_header) {
+ frame_id_numbers_present_flag = avifBitsRead(bits, 1);
+ }
+ if (frame_id_numbers_present_flag) {
+ avifBitsRead(bits, 7); // delta_frame_id_length_minus_2, additional_frame_id_length_minus_1
+ }
+
+ avifBitsRead(bits, 3); // use_128x128_superblock, enable_filter_intra, enable_intra_edge_filter
+
+ if (!reduced_still_picture_header) {
+ avifBitsRead(bits, 4); // enable_interintra_compound, enable_masked_compound, enable_warped_motion, enable_dual_filter
+ uint32_t enable_order_hint = avifBitsRead(bits, 1);
+ if (enable_order_hint) {
+ avifBitsRead(bits, 2); // enable_jnt_comp, enable_ref_frame_mvs
+ }
+
+ uint32_t seq_force_screen_content_tools = 0;
+ uint32_t seq_choose_screen_content_tools = avifBitsRead(bits, 1);
+ if (seq_choose_screen_content_tools) {
+ seq_force_screen_content_tools = 2;
+ } else {
+ seq_force_screen_content_tools = avifBitsRead(bits, 1);
+ }
+ if (seq_force_screen_content_tools > 0) {
+ uint32_t seq_choose_integer_mv = avifBitsRead(bits, 1);
+ if (!seq_choose_integer_mv) {
+ avifBitsRead(bits, 1); // seq_force_integer_mv
+ }
+ }
+ if (enable_order_hint) {
+ avifBitsRead(bits, 3); // order_hint_bits_minus_1
+ }
+ }
+
+ avifBitsRead(bits, 3); // enable_superres, enable_cdef, enable_restoration
+
+ // color_config()
+ header->bitDepth = 8;
+ header->chromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN;
+ uint32_t high_bitdepth = avifBitsRead(bits, 1);
+ if ((seq_profile == 2) && high_bitdepth) {
+ uint32_t twelve_bit = avifBitsRead(bits, 1);
+ header->bitDepth = twelve_bit ? 12 : 10;
+ } else /* if (seq_profile <= 2) */ {
+ header->bitDepth = high_bitdepth ? 10 : 8;
+ }
+ uint32_t mono_chrome = 0;
+ if (seq_profile != 1) {
+ mono_chrome = avifBitsRead(bits, 1);
+ }
+ uint32_t color_description_present_flag = avifBitsRead(bits, 1);
+ if (color_description_present_flag) {
+ header->colorPrimaries = (avifColorPrimaries)avifBitsRead(bits, 8); // color_primaries
+ header->transferCharacteristics = (avifTransferCharacteristics)avifBitsRead(bits, 8); // transfer_characteristics
+ header->matrixCoefficients = (avifMatrixCoefficients)avifBitsRead(bits, 8); // matrix_coefficients
+ } else {
+ header->colorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED;
+ header->transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED;
+ header->matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED;
+ }
+ if (mono_chrome) {
+ header->range = avifBitsRead(bits, 1) ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED; // color_range
+ // subsampling_x = 1;
+ // subsampling_y = 1;
+ header->yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
+ } else if (header->colorPrimaries == AVIF_COLOR_PRIMARIES_BT709 &&
+ header->transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_SRGB &&
+ header->matrixCoefficients == AVIF_MATRIX_COEFFICIENTS_IDENTITY) {
+ header->range = AVIF_RANGE_FULL;
+ // subsampling_x = 0;
+ // subsampling_y = 0;
+ header->yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
+ } else {
+ uint32_t subsampling_x = 0;
+ uint32_t subsampling_y = 0;
+ header->range = avifBitsRead(bits, 1) ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED; // color_range
+ switch (seq_profile) {
+ case 0:
+ subsampling_x = 1;
+ subsampling_y = 1;
+ header->yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
+ break;
+ case 1:
+ subsampling_x = 0;
+ subsampling_y = 0;
+ header->yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
+ break;
+ case 2:
+ if (header->bitDepth == 12) {
+ subsampling_x = avifBitsRead(bits, 1);
+ if (subsampling_x) {
+ subsampling_y = avifBitsRead(bits, 1);
+ }
+ } else {
+ subsampling_x = 1;
+ subsampling_y = 0;
+ }
+ if (subsampling_x) {
+ header->yuvFormat = subsampling_y ? AVIF_PIXEL_FORMAT_YUV420 : AVIF_PIXEL_FORMAT_YUV422;
+ } else {
+ header->yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
+ }
+ break;
+ }
+
+ if (subsampling_x && subsampling_y) {
+ header->chromaSamplePosition = (avifChromaSamplePosition)avifBitsRead(bits, 2); // chroma_sample_position
+ }
+ }
+
+ if (!mono_chrome) {
+ avifBitsRead(bits, 1); // separate_uv_delta_q
+ }
+ avifBitsRead(bits, 1); // film_grain_params_present
+
+ return !bits->error;
+}
+
+avifBool avifSequenceHeaderParse(avifSequenceHeader * header, const avifROData * sample)
+{
+ avifROData obus = *sample;
+
+ // Find the sequence header OBU
+ while (obus.size > 0) {
+ avifBits bits;
+ avifBitsInit(&bits, obus.data, obus.size);
+
+ // obu_header()
+ avifBitsRead(&bits, 1); // obu_forbidden_bit
+ const uint32_t obu_type = avifBitsRead(&bits, 4);
+ const uint32_t obu_extension_flag = avifBitsRead(&bits, 1);
+ const uint32_t obu_has_size_field = avifBitsRead(&bits, 1);
+ avifBitsRead(&bits, 1); // obu_reserved_1bit
+
+ if (obu_extension_flag) { // obu_extension_header()
+ avifBitsRead(&bits, 8); // temporal_id, spatial_id, extension_header_reserved_3bits
+ }
+
+ uint32_t obu_size = 0;
+ if (obu_has_size_field)
+ obu_size = avifBitsReadUleb128(&bits);
+ else
+ obu_size = (int)obus.size - 1 - obu_extension_flag;
+
+ if (bits.error) {
+ return AVIF_FALSE;
+ }
+
+ const uint32_t init_bit_pos = avifBitsReadPos(&bits);
+ const uint32_t init_byte_pos = init_bit_pos >> 3;
+ if (obu_size > obus.size - init_byte_pos)
+ return AVIF_FALSE;
+
+ if (obu_type == 1) { // Sequence Header
+ return parseSequenceHeader(&bits, header);
+ }
+
+ // Skip this OBU
+ obus.data += obu_size + init_byte_pos;
+ obus.size -= obu_size + init_byte_pos;
+ }
+ return AVIF_FALSE;
+}
diff --git a/chromium/third_party/libavif/src/src/read.c b/chromium/third_party/libavif/src/src/read.c
index 51daf439a94..bb0d39f4c84 100644
--- a/chromium/third_party/libavif/src/src/read.c
+++ b/chromium/third_party/libavif/src/src/read.c
@@ -7,7 +7,6 @@
#define AUXTYPE_SIZE 64
#define CONTENTTYPE_SIZE 64
-#define MAX_COMPATIBLE_BRANDS 32
// class VisualSampleEntry(codingname) extends SampleEntry(codingname) {
// unsigned int(16) pre_defined = 0;
@@ -39,7 +38,8 @@ typedef struct avifFileType
{
uint8_t majorBrand[4];
uint32_t minorVersion;
- uint8_t compatibleBrands[4 * MAX_COMPATIBLE_BRANDS];
+ // If not null, points to a memory block of 4 * compatibleBrandsCount bytes.
+ const uint8_t * compatibleBrands;
int compatibleBrandsCount;
} avifFileType;
@@ -86,57 +86,57 @@ typedef struct avifPixelInformationProperty
// ---------------------------------------------------------------------------
// Top-level structures
+struct avifMeta;
+
+// Temporary storage for ipco/stsd contents until they can be associated and memcpy'd to an avifDecoderItem
+typedef struct avifProperty
+{
+ uint8_t type[4];
+ union
+ {
+ avifImageSpatialExtents ispe;
+ avifAuxiliaryType auxC;
+ avifColourInformationBox colr;
+ avifCodecConfigurationBox av1C;
+ avifPixelAspectRatioBox pasp;
+ avifCleanApertureBox clap;
+ avifImageRotation irot;
+ avifImageMirror imir;
+ avifPixelInformationProperty pixi;
+ } u;
+} avifProperty;
+AVIF_ARRAY_DECLARE(avifPropertyArray, avifProperty, prop);
+
+static const avifProperty * avifPropertyArrayFind(const avifPropertyArray * properties, const char * type)
+{
+ for (uint32_t propertyIndex = 0; propertyIndex < properties->count; ++propertyIndex) {
+ avifProperty * prop = &properties->prop[propertyIndex];
+ if (!memcmp(prop->type, type, 4)) {
+ return prop;
+ }
+ }
+ return NULL;
+}
+
// one "item" worth for decoding (all iref, iloc, iprp, etc refer to one of these)
typedef struct avifDecoderItem
{
uint32_t id;
+ struct avifMeta * meta; // Unowned; A back-pointer for convenience
uint8_t type[4];
uint32_t offset;
uint32_t size;
uint32_t idatID; // If non-zero, offset is relative to this idat box (iloc construction_method==1)
- avifBool ispePresent;
- avifImageSpatialExtents ispe;
- avifBool auxCPresent;
- avifAuxiliaryType auxC;
avifContentType contentType;
- avifBool colrPresent;
- avifColourInformationBox colr;
- avifBool av1CPresent;
- avifCodecConfigurationBox av1C;
- avifBool paspPresent;
- avifPixelAspectRatioBox pasp;
- avifBool clapPresent;
- avifCleanApertureBox clap;
- avifBool irotPresent;
- avifImageRotation irot;
- avifBool imirPresent;
- avifImageMirror imir;
- avifBool pixiPresent;
- avifPixelInformationProperty pixi;
+ avifPropertyArray properties;
uint32_t thumbnailForID; // if non-zero, this item is a thumbnail for Item #{thumbnailForID}
uint32_t auxForID; // if non-zero, this item is an auxC plane for Item #{auxForID}
uint32_t descForID; // if non-zero, this item is a content description for Item #{descForID}
uint32_t dimgForID; // if non-zero, this item is a derived image for Item #{dimgForID}
- avifBool hasUnsupportedEssentialProperty; // If true, this file cites a property flagged as 'essential' that libavif doesn't support (yet). Ignore the item, if so.
+ avifBool hasUnsupportedEssentialProperty; // If true, this item cites a property flagged as 'essential' that libavif doesn't support (yet). Ignore the item, if so.
} avifDecoderItem;
AVIF_ARRAY_DECLARE(avifDecoderItemArray, avifDecoderItem, item);
-// Temporary storage for ipco contents until they can be associated and memcpy'd to an avifDecoderItem
-typedef struct avifProperty
-{
- uint8_t type[4];
- avifImageSpatialExtents ispe;
- avifAuxiliaryType auxC;
- avifColourInformationBox colr;
- avifCodecConfigurationBox av1C;
- avifPixelAspectRatioBox pasp;
- avifCleanApertureBox clap;
- avifImageRotation irot;
- avifImageMirror imir;
- avifPixelInformationProperty pixi;
-} avifProperty;
-AVIF_ARRAY_DECLARE(avifPropertyArray, avifProperty, prop);
-
// idat storage
typedef struct avifDecoderItemData
{
@@ -193,8 +193,7 @@ AVIF_ARRAY_DECLARE(avifSyncSampleArray, avifSyncSample, syncSample);
typedef struct avifSampleDescription
{
uint8_t format[4];
- avifBool av1CPresent;
- avifCodecConfigurationBox av1C;
+ avifPropertyArray properties;
} avifSampleDescription;
AVIF_ARRAY_DECLARE(avifSampleDescriptionArray, avifSampleDescription, description);
@@ -225,6 +224,10 @@ static avifSampleTable * avifSampleTableCreate()
static void avifSampleTableDestroy(avifSampleTable * sampleTable)
{
avifArrayDestroy(&sampleTable->chunks);
+ for (uint32_t i = 0; i < sampleTable->sampleDescriptions.count; ++i) {
+ avifSampleDescription * description = &sampleTable->sampleDescriptions.description[i];
+ avifArrayDestroy(&description->properties);
+ }
avifArrayDestroy(&sampleTable->sampleDescriptions);
avifArrayDestroy(&sampleTable->sampleToChunks);
avifArrayDestroy(&sampleTable->sampleSizes);
@@ -268,15 +271,15 @@ static uint32_t avifCodecConfigurationBoxGetDepth(const avifCodecConfigurationBo
return 8;
}
-static uint32_t avifSampleTableGetDepth(const avifSampleTable * sampleTable)
+static const avifPropertyArray * avifSampleTableGetProperties(const avifSampleTable * sampleTable)
{
for (uint32_t i = 0; i < sampleTable->sampleDescriptions.count; ++i) {
const avifSampleDescription * description = &sampleTable->sampleDescriptions.description[i];
- if (!memcmp(description->format, "av01", 4) && description->av1CPresent) {
- return avifCodecConfigurationBoxGetDepth(&description->av1C);
+ if (!memcmp(description->format, "av01", 4)) {
+ return &description->properties;
}
}
- return 0;
+ return NULL;
}
// one video track ("trak" contents)
@@ -289,6 +292,7 @@ typedef struct avifTrack
uint32_t width;
uint32_t height;
avifSampleTable * sampleTable;
+ struct avifMeta * meta;
} avifTrack;
AVIF_ARRAY_DECLARE(avifTrackArray, avifTrack, track);
@@ -299,7 +303,7 @@ avifCodecDecodeInput * avifCodecDecodeInputCreate(void)
{
avifCodecDecodeInput * decodeInput = (avifCodecDecodeInput *)avifAlloc(sizeof(avifCodecDecodeInput));
memset(decodeInput, 0, sizeof(avifCodecDecodeInput));
- avifArrayCreate(&decodeInput->samples, sizeof(avifSample), 1);
+ avifArrayCreate(&decodeInput->samples, sizeof(avifDecodeSample), 1);
return decodeInput;
}
@@ -341,7 +345,7 @@ static avifBool avifCodecDecodeInputGetSamples(avifCodecDecodeInput * decodeInpu
sampleSize = sampleSizePtr->size;
}
- avifSample * sample = (avifSample *)avifArrayPushPtr(&decodeInput->samples);
+ avifDecodeSample * sample = (avifDecodeSample *)avifArrayPushPtr(&decodeInput->samples);
sample->data.data = rawInput->data + sampleOffset;
sample->data.size = sampleSize;
sample->sync = AVIF_FALSE; // to potentially be set to true following the outer loop
@@ -371,6 +375,16 @@ static avifBool avifCodecDecodeInputGetSamples(avifCodecDecodeInput * decodeInpu
}
// ---------------------------------------------------------------------------
+// Helper macros
+
+#define BEGIN_STREAM(VARNAME, PTR, SIZE) \
+ avifROStream VARNAME; \
+ avifROData VARNAME##_roData; \
+ VARNAME##_roData.data = PTR; \
+ VARNAME##_roData.size = SIZE; \
+ avifROStreamStart(&VARNAME, &VARNAME##_roData)
+
+// ---------------------------------------------------------------------------
// avifDecoderData
typedef struct avifTile
@@ -381,12 +395,94 @@ typedef struct avifTile
} avifTile;
AVIF_ARRAY_DECLARE(avifTileArray, avifTile, tile);
-typedef struct avifDecoderData
-{
- avifFileType ftyp;
+// This holds one "meta" box (from the BMFF and HEIF standards) worth of relevant-to-AVIF information.
+// * If a meta box is parsed from the root level of the BMFF, it can contain the information about
+// "items" which might be color planes, alpha planes, or EXIF or XMP metadata.
+// * If a meta box is parsed from inside of a track ("trak") box, any metadata (EXIF/XMP) items inside
+// of that box are implicitly associated with that track.
+typedef struct avifMeta
+{
+ // Items (from HEIF) are the generic storage for any data that does not require timed processing
+ // (single image color planes, alpha planes, EXIF, XMP, etc). Each item has a unique integer ID >1,
+ // and is defined by a series of child boxes in a meta box:
+ // * iloc - location: byte offset to item data, item size in bytes
+ // * iinf - information: type of item (color planes, alpha plane, EXIF, XMP)
+ // * ipco - properties: dimensions, aspect ratio, image transformations, references to other items
+ // * ipma - associations: Attaches an item in the properties list to a given item
+ //
+ // Items are lazily created in this array when any of the above boxes refer to one by a new (unseen) ID,
+ // and are then further modified/updated as new information for an item's ID is parsed.
avifDecoderItemArray items;
+
+ // Any ipco boxes explained above are populated into this array as a staging area, which are
+ // then duplicated into the appropriate items upon encountering an item property association
+ // (ipma) box.
avifPropertyArray properties;
+
+ // Filled with the contents of "idat" boxes, which are raw data that an item can directly refer to in its
+ // item location box (iloc) instead of just giving an offset into the overall file. If all items' iloc boxes
+ // simply point at an offset/length in the file itself, this array will likely be empty.
avifDecoderItemDataArray idats;
+
+ // Ever-incrementing ID for uniquely identifying which 'meta' box contains an idat (when
+ // multiple meta boxes exist as BMFF siblings). Each time avifParseMetaBox() is called on an
+ // avifMeta struct, this value is incremented. Any time an additional meta box is detected at
+ // the same "level" (root level, trak level, etc), this ID helps distinguish which meta box's
+ // "idat" is which, as items implicitly reference idat boxes that exist in the same meta
+ // box.
+ uint32_t idatID;
+
+ // Contents of a pitm box, which signal which of the items in this file is the main image. For
+ // AVIF, this should point at an av01 type item containing color planes, and all other items
+ // are ignored unless they refer to this item in some way (alpha plane, EXIF/XMP metadata).
+ uint32_t primaryItemID;
+} avifMeta;
+
+static avifMeta * avifMetaCreate()
+{
+ avifMeta * meta = (avifMeta *)avifAlloc(sizeof(avifMeta));
+ memset(meta, 0, sizeof(avifMeta));
+ avifArrayCreate(&meta->items, sizeof(avifDecoderItem), 8);
+ avifArrayCreate(&meta->properties, sizeof(avifProperty), 16);
+ avifArrayCreate(&meta->idats, sizeof(avifDecoderItemData), 1);
+ return meta;
+}
+
+static void avifMetaDestroy(avifMeta * meta)
+{
+ for (uint32_t i = 0; i < meta->items.count; ++i) {
+ avifDecoderItem * item = &meta->items.item[i];
+ avifArrayDestroy(&item->properties);
+ }
+ avifArrayDestroy(&meta->items);
+ avifArrayDestroy(&meta->properties);
+ avifArrayDestroy(&meta->idats);
+ avifFree(meta);
+}
+
+static avifDecoderItem * avifMetaFindItem(avifMeta * meta, uint32_t itemID)
+{
+ if (itemID == 0) {
+ return NULL;
+ }
+
+ for (uint32_t i = 0; i < meta->items.count; ++i) {
+ if (meta->items.item[i].id == itemID) {
+ return &meta->items.item[i];
+ }
+ }
+
+ avifDecoderItem * item = (avifDecoderItem *)avifArrayPushPtr(&meta->items);
+ avifArrayCreate(&item->properties, sizeof(avifProperty), 16);
+ item->id = itemID;
+ item->meta = meta;
+ return item;
+}
+
+typedef struct avifDecoderData
+{
+ avifFileType ftyp;
+ avifMeta * meta; // The root-level meta box
avifTrackArray tracks;
avifROData rawInput;
avifTileArray tiles;
@@ -396,23 +492,19 @@ typedef struct avifDecoderData
avifImageGrid alphaGrid;
avifDecoderSource source;
const avifSampleTable * sourceSampleTable; // NULL unless (source == AVIF_DECODER_SOURCE_TRACKS), owned by an avifTrack
- uint32_t primaryItemID;
- uint32_t metaBoxID; // Ever-incrementing ID for uniquely identifying which 'meta' box contains an idat
- avifBool cicpSet; // True if avifDecoder's image has had its CICP set correctly yet.
- // This allows nclx colr boxes to override AV1 CICP, as specified in the MIAF
- // standard (ISO/IEC 23000-22:2019), section 7.3.6.4:
- //
- // "The colour information property takes precedence over any colour information in the image
- // bitstream, i.e. if the property is present, colour information in the bitstream shall be ignored."
+ avifBool cicpSet; // True if avifDecoder's image has had its CICP set correctly yet.
+ // This allows nclx colr boxes to override AV1 CICP, as specified in the MIAF
+ // standard (ISO/IEC 23000-22:2019), section 7.3.6.4:
+ //
+ // "The colour information property takes precedence over any colour information in the image
+ // bitstream, i.e. if the property is present, colour information in the bitstream shall be ignored."
} avifDecoderData;
static avifDecoderData * avifDecoderDataCreate()
{
avifDecoderData * data = (avifDecoderData *)avifAlloc(sizeof(avifDecoderData));
memset(data, 0, sizeof(avifDecoderData));
- avifArrayCreate(&data->items, sizeof(avifDecoderItem), 8);
- avifArrayCreate(&data->properties, sizeof(avifProperty), 16);
- avifArrayCreate(&data->idats, sizeof(avifDecoderItemData), 1);
+ data->meta = avifMetaCreate();
avifArrayCreate(&data->tracks, sizeof(avifTrack), 2);
avifArrayCreate(&data->tiles, sizeof(avifTile), 8);
return data;
@@ -440,6 +532,13 @@ static avifTile * avifDecoderDataCreateTile(avifDecoderData * data)
return tile;
}
+static avifTrack * avifDecoderDataCreateTrack(avifDecoderData * data)
+{
+ avifTrack * track = (avifTrack *)avifArrayPushPtr(&data->tracks);
+ track->meta = avifMetaCreate();
+ return track;
+}
+
static void avifDecoderDataClearTiles(avifDecoderData * data)
{
for (unsigned int i = 0; i < data->tiles.count; ++i) {
@@ -464,12 +563,14 @@ static void avifDecoderDataClearTiles(avifDecoderData * data)
static void avifDecoderDataDestroy(avifDecoderData * data)
{
- avifArrayDestroy(&data->items);
- avifArrayDestroy(&data->properties);
- avifArrayDestroy(&data->idats);
+ avifMetaDestroy(data->meta);
for (uint32_t i = 0; i < data->tracks.count; ++i) {
- if (data->tracks.track[i].sampleTable) {
- avifSampleTableDestroy(data->tracks.track[i].sampleTable);
+ avifTrack * track = &data->tracks.track[i];
+ if (track->sampleTable) {
+ avifSampleTableDestroy(track->sampleTable);
+ }
+ if (track->meta) {
+ avifMetaDestroy(track->meta);
}
}
avifArrayDestroy(&data->tracks);
@@ -478,23 +579,6 @@ static void avifDecoderDataDestroy(avifDecoderData * data)
avifFree(data);
}
-static avifDecoderItem * avifDecoderDataFindItem(avifDecoderData * data, uint32_t itemID)
-{
- if (itemID == 0) {
- return NULL;
- }
-
- for (uint32_t i = 0; i < data->items.count; ++i) {
- if (data->items.item[i].id == itemID) {
- return &data->items.item[i];
- }
- }
-
- avifDecoderItem * item = (avifDecoderItem *)avifArrayPushPtr(&data->items);
- item->id = itemID;
- return item;
-}
-
static const uint8_t * avifDecoderDataCalcItemPtr(avifDecoderData * data, avifDecoderItem * item)
{
avifROData * offsetBuffer = NULL;
@@ -506,9 +590,9 @@ static const uint8_t * avifDecoderDataCalcItemPtr(avifDecoderData * data, avifDe
// construction_method: idat(1)
// Find associated idat block
- for (uint32_t i = 0; i < data->idats.count; ++i) {
- if (data->idats.idat[i].id == item->idatID) {
- offsetBuffer = &data->idats.idat[i].data;
+ for (uint32_t i = 0; i < item->meta->idats.count; ++i) {
+ if (item->meta->idats.idat[i].id == item->idatID) {
+ offsetBuffer = &item->meta->idats.idat[i].data;
break;
}
}
@@ -535,15 +619,16 @@ static avifBool avifDecoderDataGenerateImageGridTiles(avifDecoderData * data, av
// Count number of dimg for this item, bail out if it doesn't match perfectly
unsigned int tilesAvailable = 0;
- for (uint32_t i = 0; i < data->items.count; ++i) {
- avifDecoderItem * item = &data->items.item[i];
+ for (uint32_t i = 0; i < gridItem->meta->items.count; ++i) {
+ avifDecoderItem * item = &gridItem->meta->items.item[i];
if (item->dimgForID == gridItem->id) {
if (memcmp(item->type, "av01", 4)) {
continue;
}
if (item->hasUnsupportedEssentialProperty) {
- // An essential property isn't supported by libavif; ignore the item.
- continue;
+ // An essential property isn't supported by libavif; can't
+ // decode a grid image if any tile in the grid isn't supported.
+ return AVIF_FALSE;
}
++tilesAvailable;
@@ -554,23 +639,33 @@ static avifBool avifDecoderDataGenerateImageGridTiles(avifDecoderData * data, av
return AVIF_FALSE;
}
- for (uint32_t i = 0; i < data->items.count; ++i) {
- avifDecoderItem * item = &data->items.item[i];
+ avifBool firstTile = AVIF_TRUE;
+ for (uint32_t i = 0; i < gridItem->meta->items.count; ++i) {
+ avifDecoderItem * item = &gridItem->meta->items.item[i];
if (item->dimgForID == gridItem->id) {
if (memcmp(item->type, "av01", 4)) {
continue;
}
- if (item->hasUnsupportedEssentialProperty) {
- // An essential property isn't supported by libavif; ignore the item.
- continue;
- }
avifTile * tile = avifDecoderDataCreateTile(data);
- avifSample * sample = (avifSample *)avifArrayPushPtr(&tile->input->samples);
+ avifDecodeSample * sample = (avifDecodeSample *)avifArrayPushPtr(&tile->input->samples);
sample->data.data = avifDecoderDataCalcItemPtr(data, item);
sample->data.size = item->size;
sample->sync = AVIF_TRUE;
tile->input->alpha = alpha;
+
+ if (firstTile) {
+ firstTile = AVIF_FALSE;
+
+ // Adopt the av1C property of the first av01 tile, so that it can be queried from
+ // the top-level color/alpha item during avifDecoderReset().
+ const avifProperty * srcProp = avifPropertyArrayFind(&item->properties, "av1C");
+ if (!srcProp) {
+ return AVIF_FALSE;
+ }
+ avifProperty * dstProp = (avifProperty *)avifArrayPushPtr(&gridItem->properties);
+ memcpy(dstProp, srcProp, sizeof(avifProperty));
+ }
}
}
return AVIF_TRUE;
@@ -626,6 +721,9 @@ static avifBool avifDecoderDataFillImageGrid(avifDecoderData * data,
dstImage->matrixCoefficients = firstTile->image->matrixCoefficients;
}
}
+ if (alpha) {
+ dstImage->alphaRange = firstTile->image->alphaRange;
+ }
avifImageAllocatePlanes(dstImage, alpha ? AVIF_PLANES_A : AVIF_PLANES_YUV);
@@ -652,7 +750,13 @@ static avifBool avifDecoderDataFillImageGrid(avifDecoderData * data,
// Y and A channels
size_t yaColOffset = colIndex * firstTile->image->width;
+ if (yaColOffset >= grid->outputWidth) {
+ return AVIF_FALSE;
+ }
size_t yaRowOffset = rowIndex * firstTile->image->height;
+ if (yaRowOffset >= grid->outputHeight) {
+ return AVIF_FALSE;
+ }
size_t yaRowBytes = widthToCopy * pixelBytes;
if (alpha) {
@@ -698,10 +802,57 @@ static avifBool avifDecoderDataFillImageGrid(avifDecoderData * data,
return AVIF_TRUE;
}
+// If colorId == 0 (a sentinel value as item IDs must be nonzero), accept any found EXIF/XMP metadata. Passing in 0
+// is used when finding metadata in a meta box embedded in a trak box, as any items inside of a meta box that is
+// inside of a trak box are implicitly associated to the track.
+static avifBool avifDecoderDataFindMetadata(avifDecoderData * data, avifMeta * meta, avifImage * image, uint32_t colorId)
+{
+ avifROData exifData = AVIF_DATA_EMPTY;
+ avifROData xmpData = AVIF_DATA_EMPTY;
+
+ for (uint32_t itemIndex = 0; itemIndex < meta->items.count; ++itemIndex) {
+ avifDecoderItem * item = &meta->items.item[itemIndex];
+ if (!item->size) {
+ continue;
+ }
+ if (item->hasUnsupportedEssentialProperty) {
+ // An essential property isn't supported by libavif; ignore the item.
+ continue;
+ }
+
+ if ((colorId > 0) && (item->descForID != colorId)) {
+ // Not a content description (metadata) for the colorOBU, skip it
+ continue;
+ }
+
+ if (!memcmp(item->type, "Exif", 4)) {
+ // Advance past Annex A.2.1's header
+ const uint8_t * boxPtr = avifDecoderDataCalcItemPtr(data, item);
+ BEGIN_STREAM(exifBoxStream, boxPtr, item->size);
+ uint32_t exifTiffHeaderOffset;
+ CHECK(avifROStreamReadU32(&exifBoxStream, &exifTiffHeaderOffset)); // unsigned int(32) exif_tiff_header_offset;
+
+ exifData.data = avifROStreamCurrent(&exifBoxStream);
+ exifData.size = avifROStreamRemainingBytes(&exifBoxStream);
+ } else if (!memcmp(item->type, "mime", 4) && !memcmp(item->contentType.contentType, xmpContentType, xmpContentTypeSize)) {
+ xmpData.data = avifDecoderDataCalcItemPtr(data, item);
+ xmpData.size = item->size;
+ }
+ }
+
+ if (exifData.data && exifData.size) {
+ avifImageSetMetadataExif(image, exifData.data, exifData.size);
+ }
+ if (xmpData.data && xmpData.size) {
+ avifImageSetMetadataXMP(image, xmpData.data, xmpData.size);
+ }
+ return AVIF_TRUE;
+}
+
// ---------------------------------------------------------------------------
// URN
-static avifBool isAlphaURN(char * urn)
+static avifBool isAlphaURN(const char * urn)
{
return !strcmp(urn, URN_ALPHA0) || !strcmp(urn, URN_ALPHA1);
}
@@ -709,20 +860,12 @@ static avifBool isAlphaURN(char * urn)
// ---------------------------------------------------------------------------
// BMFF Parsing
-#define BEGIN_STREAM(VARNAME, PTR, SIZE) \
- avifROStream VARNAME; \
- avifROData VARNAME##_roData; \
- VARNAME##_roData.data = PTR; \
- VARNAME##_roData.size = SIZE; \
- avifROStreamStart(&VARNAME, &VARNAME##_roData)
-
-static avifBool avifParseItemLocationBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemLocationBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
uint8_t version;
- uint8_t flags[3];
- CHECK(avifROStreamReadVersionAndFlags(&s, &version, flags));
+ CHECK(avifROStreamReadVersionAndFlags(&s, &version, NULL));
if (version > 2) {
return AVIF_FALSE;
}
@@ -773,7 +916,7 @@ static avifBool avifParseItemLocationBox(avifDecoderData * data, const uint8_t *
return AVIF_FALSE;
}
if (constructionMethod == 1) {
- idatID = data->metaBoxID;
+ idatID = meta->idatID;
}
}
@@ -794,7 +937,7 @@ static avifBool avifParseItemLocationBox(avifDecoderData * data, const uint8_t *
uint64_t extentLength; // unsigned int(offset_size*8) extent_length;
CHECK(avifROStreamReadUX8(&s, &extentLength, lengthSize));
- avifDecoderItem * item = avifDecoderDataFindItem(data, itemID);
+ avifDecoderItem * item = avifMetaFindItem(meta, itemID);
if (!item) {
return AVIF_FALSE;
}
@@ -840,58 +983,63 @@ static avifBool avifParseImageGridBox(avifImageGrid * grid, const uint8_t * raw,
CHECK(avifROStreamReadU32(&s, &grid->outputWidth)); // unsigned int(FieldLength) output_width;
CHECK(avifROStreamReadU32(&s, &grid->outputHeight)); // unsigned int(FieldLength) output_height;
}
+ if (grid->outputWidth > AVIF_MAX_IMAGE_SIZE / grid->outputHeight) {
+ return AVIF_FALSE;
+ }
return AVIF_TRUE;
}
-static avifBool avifParseImageSpatialExtentsProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseImageSpatialExtentsProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
- CHECK(avifROStreamReadU32(&s, &data->properties.prop[propertyIndex].ispe.width));
- CHECK(avifROStreamReadU32(&s, &data->properties.prop[propertyIndex].ispe.height));
+ avifImageSpatialExtents * ispe = &prop->u.ispe;
+ CHECK(avifROStreamReadU32(&s, &ispe->width));
+ CHECK(avifROStreamReadU32(&s, &ispe->height));
return AVIF_TRUE;
}
-static avifBool avifParseAuxiliaryTypeProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseAuxiliaryTypeProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
- CHECK(avifROStreamReadString(&s, data->properties.prop[propertyIndex].auxC.auxType, AUXTYPE_SIZE));
+ CHECK(avifROStreamReadString(&s, prop->u.auxC.auxType, AUXTYPE_SIZE));
return AVIF_TRUE;
}
-static avifBool avifParseColourInformationBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseColourInformationBox(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- data->properties.prop[propertyIndex].colr.hasICC = AVIF_FALSE;
- data->properties.prop[propertyIndex].colr.hasNCLX = AVIF_FALSE;
+ avifColourInformationBox * colr = &prop->u.colr;
+ colr->hasICC = AVIF_FALSE;
+ colr->hasNCLX = AVIF_FALSE;
uint8_t colorType[4]; // unsigned int(32) colour_type;
CHECK(avifROStreamRead(&s, colorType, 4));
if (!memcmp(colorType, "rICC", 4) || !memcmp(colorType, "prof", 4)) {
- data->properties.prop[propertyIndex].colr.hasICC = AVIF_TRUE;
- data->properties.prop[propertyIndex].colr.icc = avifROStreamCurrent(&s);
- data->properties.prop[propertyIndex].colr.iccSize = avifROStreamRemainingBytes(&s);
+ colr->hasICC = AVIF_TRUE;
+ colr->icc = avifROStreamCurrent(&s);
+ colr->iccSize = avifROStreamRemainingBytes(&s);
} else if (!memcmp(colorType, "nclx", 4)) {
uint16_t tmp16;
// unsigned int(16) colour_primaries;
CHECK(avifROStreamReadU16(&s, &tmp16));
- data->properties.prop[propertyIndex].colr.colorPrimaries = (avifColorPrimaries)tmp16;
+ colr->colorPrimaries = (avifColorPrimaries)tmp16;
// unsigned int(16) transfer_characteristics;
CHECK(avifROStreamReadU16(&s, &tmp16));
- data->properties.prop[propertyIndex].colr.transferCharacteristics = (avifTransferCharacteristics)tmp16;
+ colr->transferCharacteristics = (avifTransferCharacteristics)tmp16;
// unsigned int(16) matrix_coefficients;
CHECK(avifROStreamReadU16(&s, &tmp16));
- data->properties.prop[propertyIndex].colr.matrixCoefficients = (avifMatrixCoefficients)tmp16;
+ colr->matrixCoefficients = (avifMatrixCoefficients)tmp16;
// unsigned int(1) full_range_flag;
// unsigned int(7) reserved = 0;
uint8_t tmp8;
CHECK(avifROStreamRead(&s, &tmp8, 1));
- data->properties.prop[propertyIndex].colr.range = (avifRange)(tmp8 & 0x80);
- data->properties.prop[propertyIndex].colr.hasNCLX = AVIF_TRUE;
+ colr->range = (tmp8 & 0x80) ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED;
+ colr->hasNCLX = AVIF_TRUE;
}
return AVIF_TRUE;
}
@@ -924,26 +1072,26 @@ static avifBool avifParseAV1CodecConfigurationBox(const uint8_t * raw, size_t ra
return AVIF_TRUE;
}
-static avifBool avifParseAV1CodecConfigurationBoxProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseAV1CodecConfigurationBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
- return avifParseAV1CodecConfigurationBox(raw, rawLen, &data->properties.prop[propertyIndex].av1C);
+ return avifParseAV1CodecConfigurationBox(raw, rawLen, &prop->u.av1C);
}
-static avifBool avifParsePixelAspectRatioBoxProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParsePixelAspectRatioBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- avifPixelAspectRatioBox * pasp = &data->properties.prop[propertyIndex].pasp;
+ avifPixelAspectRatioBox * pasp = &prop->u.pasp;
CHECK(avifROStreamReadU32(&s, &pasp->hSpacing)); // unsigned int(32) hSpacing;
CHECK(avifROStreamReadU32(&s, &pasp->vSpacing)); // unsigned int(32) vSpacing;
return AVIF_TRUE;
}
-static avifBool avifParseCleanApertureBoxProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseCleanApertureBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- avifCleanApertureBox * clap = &data->properties.prop[propertyIndex].clap;
+ avifCleanApertureBox * clap = &prop->u.clap;
CHECK(avifROStreamReadU32(&s, &clap->widthN)); // unsigned int(32) cleanApertureWidthN;
CHECK(avifROStreamReadU32(&s, &clap->widthD)); // unsigned int(32) cleanApertureWidthD;
CHECK(avifROStreamReadU32(&s, &clap->heightN)); // unsigned int(32) cleanApertureHeightN;
@@ -955,11 +1103,11 @@ static avifBool avifParseCleanApertureBoxProperty(avifDecoderData * data, const
return AVIF_TRUE;
}
-static avifBool avifParseImageRotationProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseImageRotationProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- avifImageRotation * irot = &data->properties.prop[propertyIndex].irot;
+ avifImageRotation * irot = &prop->u.irot;
CHECK(avifROStreamRead(&s, &irot->angle, 1)); // unsigned int (6) reserved = 0; unsigned int (2) angle;
if ((irot->angle & 0xfc) != 0) {
// reserved bits must be 0
@@ -968,11 +1116,11 @@ static avifBool avifParseImageRotationProperty(avifDecoderData * data, const uin
return AVIF_TRUE;
}
-static avifBool avifParseImageMirrorProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParseImageMirrorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- avifImageMirror * imir = &data->properties.prop[propertyIndex].imir;
+ avifImageMirror * imir = &prop->u.imir;
CHECK(avifROStreamRead(&s, &imir->axis, 1)); // unsigned int (7) reserved = 0; unsigned int (1) axis;
if ((imir->axis & 0xfe) != 0) {
// reserved bits must be 0
@@ -981,12 +1129,12 @@ static avifBool avifParseImageMirrorProperty(avifDecoderData * data, const uint8
return AVIF_TRUE;
}
-static avifBool avifParsePixelInformationProperty(avifDecoderData * data, const uint8_t * raw, size_t rawLen, int propertyIndex)
+static avifBool avifParsePixelInformationProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
- avifPixelInformationProperty * pixi = &data->properties.prop[propertyIndex].pixi;
+ avifPixelInformationProperty * pixi = &prop->u.pixi;
CHECK(avifROStreamRead(&s, &pixi->planeCount, 1)); // unsigned int (8) num_channels;
if (pixi->planeCount > MAX_PIXI_PLANE_DEPTHS) {
return AVIF_FALSE;
@@ -997,7 +1145,7 @@ static avifBool avifParsePixelInformationProperty(avifDecoderData * data, const
return AVIF_TRUE;
}
-static avifBool avifParseItemPropertyContainerBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemPropertyContainerBox(avifPropertyArray * properties, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1005,34 +1153,27 @@ static avifBool avifParseItemPropertyContainerBox(avifDecoderData * data, const
avifBoxHeader header;
CHECK(avifROStreamReadBoxHeader(&s, &header));
- int propertyIndex = avifArrayPushIndex(&data->properties);
- memcpy(data->properties.prop[propertyIndex].type, header.type, 4);
+ int propertyIndex = avifArrayPushIndex(properties);
+ avifProperty * prop = &properties->prop[propertyIndex];
+ memcpy(prop->type, header.type, 4);
if (!memcmp(header.type, "ispe", 4)) {
- CHECK(avifParseImageSpatialExtentsProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "auxC", 4)) {
- CHECK(avifParseAuxiliaryTypeProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "colr", 4)) {
- CHECK(avifParseColourInformationBox(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "av1C", 4)) {
- CHECK(avifParseAV1CodecConfigurationBoxProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "pasp", 4)) {
- CHECK(avifParsePixelAspectRatioBoxProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "clap", 4)) {
- CHECK(avifParseCleanApertureBoxProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "irot", 4)) {
- CHECK(avifParseImageRotationProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "imir", 4)) {
- CHECK(avifParseImageMirrorProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
- }
- if (!memcmp(header.type, "pixi", 4)) {
- CHECK(avifParsePixelInformationProperty(data, avifROStreamCurrent(&s), header.size, propertyIndex));
+ CHECK(avifParseImageSpatialExtentsProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "auxC", 4)) {
+ CHECK(avifParseAuxiliaryTypeProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "colr", 4)) {
+ CHECK(avifParseColourInformationBox(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "av1C", 4)) {
+ CHECK(avifParseAV1CodecConfigurationBoxProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "pasp", 4)) {
+ CHECK(avifParsePixelAspectRatioBoxProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "clap", 4)) {
+ CHECK(avifParseCleanApertureBoxProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "irot", 4)) {
+ CHECK(avifParseImageRotationProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "imir", 4)) {
+ CHECK(avifParseImageMirrorProperty(prop, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "pixi", 4)) {
+ CHECK(avifParsePixelInformationProperty(prop, avifROStreamCurrent(&s), header.size));
}
CHECK(avifROStreamSkip(&s, header.size));
@@ -1040,14 +1181,14 @@ static avifBool avifParseItemPropertyContainerBox(avifDecoderData * data, const
return AVIF_TRUE;
}
-static avifBool avifParseItemPropertyAssociation(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemPropertyAssociation(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
uint8_t version;
- uint8_t flags[3];
- CHECK(avifROStreamReadVersionAndFlags(&s, &version, flags));
- avifBool propertyIndexIsU16 = ((flags[2] & 0x1) != 0);
+ uint32_t flags;
+ CHECK(avifROStreamReadVersionAndFlags(&s, &version, &flags));
+ avifBool propertyIndexIsU16 = ((flags & 0x1) != 0);
uint32_t entryCount;
CHECK(avifROStreamReadU32(&s, &entryCount));
@@ -1082,44 +1223,30 @@ static avifBool avifParseItemPropertyAssociation(avifDecoderData * data, const u
}
--propertyIndex; // 1-indexed
- if (propertyIndex >= data->properties.count) {
+ if (propertyIndex >= meta->properties.count) {
return AVIF_FALSE;
}
- avifDecoderItem * item = avifDecoderDataFindItem(data, itemID);
+ avifDecoderItem * item = avifMetaFindItem(meta, itemID);
if (!item) {
return AVIF_FALSE;
}
- // Associate property with item
- avifProperty * prop = &data->properties.prop[propertyIndex];
- if (!memcmp(prop->type, "ispe", 4)) {
- item->ispePresent = AVIF_TRUE;
- memcpy(&item->ispe, &prop->ispe, sizeof(avifImageSpatialExtents));
- } else if (!memcmp(prop->type, "auxC", 4)) {
- item->auxCPresent = AVIF_TRUE;
- memcpy(&item->auxC, &prop->auxC, sizeof(avifAuxiliaryType));
- } else if (!memcmp(prop->type, "colr", 4)) {
- item->colrPresent = AVIF_TRUE;
- memcpy(&item->colr, &prop->colr, sizeof(avifColourInformationBox));
- } else if (!memcmp(prop->type, "av1C", 4)) {
- item->av1CPresent = AVIF_TRUE;
- memcpy(&item->av1C, &prop->av1C, sizeof(avifCodecConfigurationBox));
- } else if (!memcmp(prop->type, "pasp", 4)) {
- item->paspPresent = AVIF_TRUE;
- memcpy(&item->pasp, &prop->pasp, sizeof(avifPixelAspectRatioBox));
- } else if (!memcmp(prop->type, "clap", 4)) {
- item->clapPresent = AVIF_TRUE;
- memcpy(&item->clap, &prop->clap, sizeof(avifCleanApertureBox));
- } else if (!memcmp(prop->type, "irot", 4)) {
- item->irotPresent = AVIF_TRUE;
- memcpy(&item->irot, &prop->irot, sizeof(avifImageRotation));
- } else if (!memcmp(prop->type, "imir", 4)) {
- item->imirPresent = AVIF_TRUE;
- memcpy(&item->imir, &prop->imir, sizeof(avifImageMirror));
- } else if (!memcmp(prop->type, "pixi", 4)) {
- item->pixiPresent = AVIF_TRUE;
- memcpy(&item->pixi, &prop->pixi, sizeof(avifPixelInformationProperty));
+ // Copy property to item
+ avifProperty * srcProp = &meta->properties.prop[propertyIndex];
+
+ static const char * supportedTypes[] = { "ispe", "auxC", "colr", "av1C", "pasp", "clap", "irot", "imir", "pixi" };
+ size_t supportedTypesCount = sizeof(supportedTypes) / sizeof(supportedTypes[0]);
+ avifBool supportedType = AVIF_FALSE;
+ for (size_t i = 0; i < supportedTypesCount; ++i) {
+ if (!memcmp(srcProp->type, supportedTypes[i], 4)) {
+ supportedType = AVIF_TRUE;
+ break;
+ }
+ }
+ if (supportedType) {
+ avifProperty * dstProp = (avifProperty *)avifArrayPushPtr(&item->properties);
+ memcpy(dstProp, srcProp, sizeof(avifProperty));
} else {
if (essential) {
// Discovered an essential item property that libavif doesn't support!
@@ -1133,9 +1260,9 @@ static avifBool avifParseItemPropertyAssociation(avifDecoderData * data, const u
return AVIF_TRUE;
}
-static avifBool avifParsePrimaryItemBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParsePrimaryItemBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
- if (data->primaryItemID > 0) {
+ if (meta->primaryItemID > 0) {
// Illegal to have multiple pitm boxes, bail out
return AVIF_FALSE;
}
@@ -1148,33 +1275,31 @@ static avifBool avifParsePrimaryItemBox(avifDecoderData * data, const uint8_t *
if (version == 0) {
uint16_t tmp16;
CHECK(avifROStreamReadU16(&s, &tmp16)); // unsigned int(16) item_ID;
- data->primaryItemID = tmp16;
+ meta->primaryItemID = tmp16;
} else {
- CHECK(avifROStreamReadU32(&s, &data->primaryItemID)); // unsigned int(32) item_ID;
+ CHECK(avifROStreamReadU32(&s, &meta->primaryItemID)); // unsigned int(32) item_ID;
}
return AVIF_TRUE;
}
-static avifBool avifParseItemDataBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemDataBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
- uint32_t idatID = data->metaBoxID;
-
// Check to see if we've already seen an idat box for this meta box. If so, bail out
- for (uint32_t i = 0; i < data->idats.count; ++i) {
- if (data->idats.idat[i].id == idatID) {
+ for (uint32_t i = 0; i < meta->idats.count; ++i) {
+ if (meta->idats.idat[i].id == meta->idatID) {
return AVIF_FALSE;
}
}
- int index = avifArrayPushIndex(&data->idats);
- avifDecoderItemData * idat = &data->idats.idat[index];
- idat->id = idatID;
+ int index = avifArrayPushIndex(&meta->idats);
+ avifDecoderItemData * idat = &meta->idats.idat[index];
+ idat->id = meta->idatID;
idat->data.data = raw;
idat->data.size = rawLen;
return AVIF_TRUE;
}
-static avifBool avifParseItemPropertiesBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemPropertiesBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1185,7 +1310,7 @@ static avifBool avifParseItemPropertiesBox(avifDecoderData * data, const uint8_t
}
// Read all item properties inside of ItemPropertyContainerBox
- CHECK(avifParseItemPropertyContainerBox(data, avifROStreamCurrent(&s), ipcoHeader.size));
+ CHECK(avifParseItemPropertyContainerBox(&meta->properties, avifROStreamCurrent(&s), ipcoHeader.size));
CHECK(avifROStreamSkip(&s, ipcoHeader.size));
// Now read all ItemPropertyAssociation until the end of the box, and make associations
@@ -1194,7 +1319,7 @@ static avifBool avifParseItemPropertiesBox(avifDecoderData * data, const uint8_t
CHECK(avifROStreamReadBoxHeader(&s, &ipmaHeader));
if (!memcmp(ipmaHeader.type, "ipma", 4)) {
- CHECK(avifParseItemPropertyAssociation(data, avifROStreamCurrent(&s), ipmaHeader.size));
+ CHECK(avifParseItemPropertyAssociation(meta, avifROStreamCurrent(&s), ipmaHeader.size));
} else {
// These must all be type ipma
return AVIF_FALSE;
@@ -1205,7 +1330,7 @@ static avifBool avifParseItemPropertiesBox(avifDecoderData * data, const uint8_t
return AVIF_TRUE;
}
-static avifBool avifParseItemInfoEntry(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemInfoEntry(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1226,7 +1351,7 @@ static avifBool avifParseItemInfoEntry(avifDecoderData * data, const uint8_t * r
memset(&contentType, 0, sizeof(contentType));
}
- avifDecoderItem * item = avifDecoderDataFindItem(data, itemID);
+ avifDecoderItem * item = avifMetaFindItem(meta, itemID);
if (!item) {
return AVIF_FALSE;
}
@@ -1236,7 +1361,7 @@ static avifBool avifParseItemInfoEntry(avifDecoderData * data, const uint8_t * r
return AVIF_TRUE;
}
-static avifBool avifParseItemInfoBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemInfoBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1258,7 +1383,7 @@ static avifBool avifParseItemInfoBox(avifDecoderData * data, const uint8_t * raw
CHECK(avifROStreamReadBoxHeader(&s, &infeHeader));
if (!memcmp(infeHeader.type, "infe", 4)) {
- CHECK(avifParseItemInfoEntry(data, avifROStreamCurrent(&s), infeHeader.size));
+ CHECK(avifParseItemInfoEntry(meta, avifROStreamCurrent(&s), infeHeader.size));
} else {
 // These must all be type infe
return AVIF_FALSE;
@@ -1270,7 +1395,7 @@ static avifBool avifParseItemInfoBox(avifDecoderData * data, const uint8_t * raw
return AVIF_TRUE;
}
-static avifBool avifParseItemReferenceBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseItemReferenceBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1311,7 +1436,7 @@ static avifBool avifParseItemReferenceBox(avifDecoderData * data, const uint8_t
// Read this reference as "{fromID} is a {irefType} for {toID}"
if (fromID && toID) {
- avifDecoderItem * item = avifDecoderDataFindItem(data, fromID);
+ avifDecoderItem * item = avifMetaFindItem(meta, fromID);
if (!item) {
return AVIF_FALSE;
}
@@ -1327,7 +1452,7 @@ static avifBool avifParseItemReferenceBox(avifDecoderData * data, const uint8_t
}
if (!memcmp(irefHeader.type, "dimg", 4)) {
// derived images refer in the opposite direction
- avifDecoderItem * dimg = avifDecoderDataFindItem(data, toID);
+ avifDecoderItem * dimg = avifMetaFindItem(meta, toID);
if (!dimg) {
return AVIF_FALSE;
}
@@ -1341,30 +1466,30 @@ static avifBool avifParseItemReferenceBox(avifDecoderData * data, const uint8_t
return AVIF_TRUE;
}
-static avifBool avifParseMetaBox(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseMetaBox(avifMeta * meta, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
- ++data->metaBoxID; // for tracking idat
+ ++meta->idatID; // for tracking idat
while (avifROStreamHasBytesLeft(&s, 1)) {
avifBoxHeader header;
CHECK(avifROStreamReadBoxHeader(&s, &header));
if (!memcmp(header.type, "iloc", 4)) {
- CHECK(avifParseItemLocationBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseItemLocationBox(meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "pitm", 4)) {
- CHECK(avifParsePrimaryItemBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParsePrimaryItemBox(meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "idat", 4)) {
- CHECK(avifParseItemDataBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseItemDataBox(meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "iprp", 4)) {
- CHECK(avifParseItemPropertiesBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseItemPropertiesBox(meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "iinf", 4)) {
- CHECK(avifParseItemInfoBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseItemInfoBox(meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "iref", 4)) {
- CHECK(avifParseItemReferenceBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseItemReferenceBox(meta, avifROStreamCurrent(&s), header.size));
}
CHECK(avifROStreamSkip(&s, header.size));
@@ -1372,14 +1497,12 @@ static avifBool avifParseMetaBox(avifDecoderData * data, const uint8_t * raw, si
return AVIF_TRUE;
}
-static avifBool avifParseTrackHeaderBox(avifDecoderData * data, avifTrack * track, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseTrackHeaderBox(avifTrack * track, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
uint8_t version;
- uint8_t flags[3];
- CHECK(avifROStreamReadVersionAndFlags(&s, &version, flags));
+ CHECK(avifROStreamReadVersionAndFlags(&s, &version, NULL));
uint32_t ignored32, trackID;
uint64_t ignored64;
@@ -1422,14 +1545,12 @@ static avifBool avifParseTrackHeaderBox(avifDecoderData * data, avifTrack * trac
return AVIF_TRUE;
}
-static avifBool avifParseMediaHeaderBox(avifDecoderData * data, avifTrack * track, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseMediaHeaderBox(avifTrack * track, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
uint8_t version;
- uint8_t flags[3];
- CHECK(avifROStreamReadVersionAndFlags(&s, &version, flags));
+ CHECK(avifROStreamReadVersionAndFlags(&s, &version, NULL));
uint32_t ignored32, mediaTimescale, mediaDuration32;
uint64_t ignored64, mediaDuration64;
@@ -1454,10 +1575,9 @@ static avifBool avifParseMediaHeaderBox(avifDecoderData * data, avifTrack * trac
return AVIF_TRUE;
}
-static avifBool avifParseChunkOffsetBox(avifDecoderData * data, avifSampleTable * sampleTable, avifBool largeOffsets, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseChunkOffsetBox(avifSampleTable * sampleTable, avifBool largeOffsets, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
@@ -1479,10 +1599,9 @@ static avifBool avifParseChunkOffsetBox(avifDecoderData * data, avifSampleTable
return AVIF_TRUE;
}
-static avifBool avifParseSampleToChunkBox(avifDecoderData * data, avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseSampleToChunkBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
@@ -1497,10 +1616,9 @@ static avifBool avifParseSampleToChunkBox(avifDecoderData * data, avifSampleTabl
return AVIF_TRUE;
}
-static avifBool avifParseSampleSizeBox(avifDecoderData * data, avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseSampleSizeBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
@@ -1519,10 +1637,9 @@ static avifBool avifParseSampleSizeBox(avifDecoderData * data, avifSampleTable *
return AVIF_TRUE;
}
-static avifBool avifParseSyncSampleBox(avifDecoderData * data, avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseSyncSampleBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
@@ -1538,10 +1655,9 @@ static avifBool avifParseSyncSampleBox(avifDecoderData * data, avifSampleTable *
return AVIF_TRUE;
}
-static avifBool avifParseTimeToSampleBox(avifDecoderData * data, avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseTimeToSampleBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
@@ -1556,10 +1672,9 @@ static avifBool avifParseTimeToSampleBox(avifDecoderData * data, avifSampleTable
return AVIF_TRUE;
}
-static avifBool avifParseSampleDescriptionBox(avifDecoderData * data, avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseSampleDescriptionBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
CHECK(avifROStreamReadAndEnforceVersion(&s, 0));
@@ -1571,21 +1686,12 @@ static avifBool avifParseSampleDescriptionBox(avifDecoderData * data, avifSample
CHECK(avifROStreamReadBoxHeader(&s, &sampleEntryHeader));
avifSampleDescription * description = (avifSampleDescription *)avifArrayPushPtr(&sampleTable->sampleDescriptions);
+ avifArrayCreate(&description->properties, sizeof(avifProperty), 16);
memcpy(description->format, sampleEntryHeader.type, sizeof(description->format));
size_t remainingBytes = avifROStreamRemainingBytes(&s);
if (!memcmp(description->format, "av01", 4) && (remainingBytes > VISUALSAMPLEENTRY_SIZE)) {
- BEGIN_STREAM(av01Stream, avifROStreamCurrent(&s) + VISUALSAMPLEENTRY_SIZE, remainingBytes - VISUALSAMPLEENTRY_SIZE);
- while (avifROStreamHasBytesLeft(&av01Stream, 1)) {
- avifBoxHeader av01ChildHeader;
- CHECK(avifROStreamReadBoxHeader(&av01Stream, &av01ChildHeader));
-
- if (!memcmp(av01ChildHeader.type, "av1C", 4)) {
- CHECK(avifParseAV1CodecConfigurationBox(avifROStreamCurrent(&av01Stream), av01ChildHeader.size, &description->av1C));
- description->av1CPresent = AVIF_TRUE;
- }
-
- CHECK(avifROStreamSkip(&av01Stream, av01ChildHeader.size));
- }
+ CHECK(avifParseItemPropertyContainerBox(
+ &description->properties, avifROStreamCurrent(&s) + VISUALSAMPLEENTRY_SIZE, remainingBytes - VISUALSAMPLEENTRY_SIZE));
}
CHECK(avifROStreamSkip(&s, sampleEntryHeader.size));
@@ -1593,7 +1699,7 @@ static avifBool avifParseSampleDescriptionBox(avifDecoderData * data, avifSample
return AVIF_TRUE;
}
-static avifBool avifParseSampleTableBox(avifDecoderData * data, avifTrack * track, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseSampleTableBox(avifTrack * track, const uint8_t * raw, size_t rawLen)
{
if (track->sampleTable) {
// A TrackBox may only have one SampleTable
@@ -1608,19 +1714,19 @@ static avifBool avifParseSampleTableBox(avifDecoderData * data, avifTrack * trac
CHECK(avifROStreamReadBoxHeader(&s, &header));
if (!memcmp(header.type, "stco", 4)) {
- CHECK(avifParseChunkOffsetBox(data, track->sampleTable, AVIF_FALSE, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseChunkOffsetBox(track->sampleTable, AVIF_FALSE, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "co64", 4)) {
- CHECK(avifParseChunkOffsetBox(data, track->sampleTable, AVIF_TRUE, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseChunkOffsetBox(track->sampleTable, AVIF_TRUE, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "stsc", 4)) {
- CHECK(avifParseSampleToChunkBox(data, track->sampleTable, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseSampleToChunkBox(track->sampleTable, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "stsz", 4)) {
- CHECK(avifParseSampleSizeBox(data, track->sampleTable, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseSampleSizeBox(track->sampleTable, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "stss", 4)) {
- CHECK(avifParseSyncSampleBox(data, track->sampleTable, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseSyncSampleBox(track->sampleTable, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "stts", 4)) {
- CHECK(avifParseTimeToSampleBox(data, track->sampleTable, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseTimeToSampleBox(track->sampleTable, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "stsd", 4)) {
- CHECK(avifParseSampleDescriptionBox(data, track->sampleTable, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseSampleDescriptionBox(track->sampleTable, avifROStreamCurrent(&s), header.size));
}
CHECK(avifROStreamSkip(&s, header.size));
@@ -1628,7 +1734,7 @@ static avifBool avifParseSampleTableBox(avifDecoderData * data, avifTrack * trac
return AVIF_TRUE;
}
-static avifBool avifParseMediaInformationBox(avifDecoderData * data, avifTrack * track, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseMediaInformationBox(avifTrack * track, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1637,7 +1743,7 @@ static avifBool avifParseMediaInformationBox(avifDecoderData * data, avifTrack *
CHECK(avifROStreamReadBoxHeader(&s, &header));
if (!memcmp(header.type, "stbl", 4)) {
- CHECK(avifParseSampleTableBox(data, track, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseSampleTableBox(track, avifROStreamCurrent(&s), header.size));
}
CHECK(avifROStreamSkip(&s, header.size));
@@ -1645,7 +1751,7 @@ static avifBool avifParseMediaInformationBox(avifDecoderData * data, avifTrack *
return AVIF_TRUE;
}
-static avifBool avifParseMediaBox(avifDecoderData * data, avifTrack * track, const uint8_t * raw, size_t rawLen)
+static avifBool avifParseMediaBox(avifTrack * track, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -1654,9 +1760,9 @@ static avifBool avifParseMediaBox(avifDecoderData * data, avifTrack * track, con
CHECK(avifROStreamReadBoxHeader(&s, &header));
if (!memcmp(header.type, "mdhd", 4)) {
- CHECK(avifParseMediaHeaderBox(data, track, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseMediaHeaderBox(track, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "minf", 4)) {
- CHECK(avifParseMediaInformationBox(data, track, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseMediaInformationBox(track, avifROStreamCurrent(&s), header.size));
}
CHECK(avifROStreamSkip(&s, header.size));
@@ -1664,10 +1770,9 @@ static avifBool avifParseMediaBox(avifDecoderData * data, avifTrack * track, con
return AVIF_TRUE;
}
-static avifBool avifTrackReferenceBox(avifDecoderData * data, avifTrack * track, const uint8_t * raw, size_t rawLen)
+static avifBool avifTrackReferenceBox(avifTrack * track, const uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
- (void)data;
while (avifROStreamHasBytesLeft(&s, 1)) {
avifBoxHeader header;
@@ -1689,18 +1794,20 @@ static avifBool avifParseTrackBox(avifDecoderData * data, const uint8_t * raw, s
{
BEGIN_STREAM(s, raw, rawLen);
- avifTrack * track = (avifTrack *)avifArrayPushPtr(&data->tracks);
+ avifTrack * track = avifDecoderDataCreateTrack(data);
while (avifROStreamHasBytesLeft(&s, 1)) {
avifBoxHeader header;
CHECK(avifROStreamReadBoxHeader(&s, &header));
if (!memcmp(header.type, "tkhd", 4)) {
- CHECK(avifParseTrackHeaderBox(data, track, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseTrackHeaderBox(track, avifROStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "meta", 4)) {
+ CHECK(avifParseMetaBox(track->meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "mdia", 4)) {
- CHECK(avifParseMediaBox(data, track, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseMediaBox(track, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "tref", 4)) {
- CHECK(avifTrackReferenceBox(data, track, avifROStreamCurrent(&s), header.size));
+ CHECK(avifTrackReferenceBox(track, avifROStreamCurrent(&s), header.size));
}
CHECK(avifROStreamSkip(&s, header.size));
@@ -1736,11 +1843,8 @@ static avifBool avifParseFileTypeBox(avifFileType * ftyp, const uint8_t * raw, s
if ((compatibleBrandsBytes % 4) != 0) {
return AVIF_FALSE;
}
- if (compatibleBrandsBytes > (4 * MAX_COMPATIBLE_BRANDS)) {
- // TODO: stop clamping and resize this
- compatibleBrandsBytes = (4 * MAX_COMPATIBLE_BRANDS);
- }
- CHECK(avifROStreamRead(&s, ftyp->compatibleBrands, compatibleBrandsBytes));
+ ftyp->compatibleBrands = avifROStreamCurrent(&s);
+ CHECK(avifROStreamSkip(&s, compatibleBrandsBytes));
ftyp->compatibleBrandsCount = (int)compatibleBrandsBytes / 4;
return AVIF_TRUE;
@@ -1757,7 +1861,7 @@ static avifBool avifParse(avifDecoderData * data, const uint8_t * raw, size_t ra
if (!memcmp(header.type, "ftyp", 4)) {
CHECK(avifParseFileTypeBox(&data->ftyp, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "meta", 4)) {
- CHECK(avifParseMetaBox(data, avifROStreamCurrent(&s), header.size));
+ CHECK(avifParseMetaBox(data->meta, avifROStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "moov", 4)) {
CHECK(avifParseMoovBox(data, avifROStreamCurrent(&s), header.size));
}
@@ -1771,25 +1875,11 @@ static avifBool avifParse(avifDecoderData * data, const uint8_t * raw, size_t ra
static avifBool avifFileTypeIsCompatible(avifFileType * ftyp)
{
- avifBool avifCompatible = (memcmp(ftyp->majorBrand, "avif", 4) == 0);
- if (!avifCompatible) {
- avifCompatible = (memcmp(ftyp->majorBrand, "avis", 4) == 0);
- }
- if (!avifCompatible) {
- avifCompatible = (memcmp(ftyp->majorBrand, "av01", 4) == 0);
- }
+ avifBool avifCompatible = (memcmp(ftyp->majorBrand, "avif", 4) == 0 || memcmp(ftyp->majorBrand, "avis", 4) == 0);
if (!avifCompatible) {
for (int compatibleBrandIndex = 0; compatibleBrandIndex < ftyp->compatibleBrandsCount; ++compatibleBrandIndex) {
- uint8_t * compatibleBrand = &ftyp->compatibleBrands[4 * compatibleBrandIndex];
- if (!memcmp(compatibleBrand, "avif", 4)) {
- avifCompatible = AVIF_TRUE;
- break;
- }
- if (!memcmp(compatibleBrand, "avis", 4)) {
- avifCompatible = AVIF_TRUE;
- break;
- }
- if (!memcmp(compatibleBrand, "av01", 4)) {
+ const uint8_t * compatibleBrand = &ftyp->compatibleBrands[4 * compatibleBrandIndex];
+ if (!memcmp(compatibleBrand, "avif", 4) || !memcmp(compatibleBrand, "avis", 4)) {
avifCompatible = AVIF_TRUE;
break;
}
@@ -1798,7 +1888,7 @@ static avifBool avifFileTypeIsCompatible(avifFileType * ftyp)
return avifCompatible;
}
-avifBool avifPeekCompatibleFileType(avifROData * input)
+avifBool avifPeekCompatibleFileType(const avifROData * input)
{
BEGIN_STREAM(s, input->data, input->size);
@@ -1851,7 +1941,7 @@ avifResult avifDecoderSetSource(avifDecoder * decoder, avifDecoderSource source)
return avifDecoderReset(decoder);
}
-avifResult avifDecoderParse(avifDecoder * decoder, avifROData * rawInput)
+avifResult avifDecoderParse(avifDecoder * decoder, const avifROData * rawInput)
{
// Cleanup anything lingering in the decoder
avifDecoderCleanup(decoder);
@@ -1874,8 +1964,8 @@ avifResult avifDecoderParse(avifDecoder * decoder, avifROData * rawInput)
}
// Sanity check items
- for (uint32_t itemIndex = 0; itemIndex < decoder->data->items.count; ++itemIndex) {
- avifDecoderItem * item = &decoder->data->items.item[itemIndex];
+ for (uint32_t itemIndex = 0; itemIndex < decoder->data->meta->items.count; ++itemIndex) {
+ avifDecoderItem * item = &decoder->data->meta->items.item[itemIndex];
if (item->hasUnsupportedEssentialProperty) {
// An essential property isn't supported by libavif; ignore the item.
continue;
@@ -1964,6 +2054,7 @@ avifResult avifDecoderReset(avifDecoder * decoder)
data->source = decoder->requestedSource;
}
+ const avifPropertyArray * colorProperties = NULL;
if (data->source == AVIF_DECODER_SOURCE_TRACKS) {
avifTrack * colorTrack = NULL;
avifTrack * alphaTrack = NULL;
@@ -1975,6 +2066,9 @@ avifResult avifDecoderReset(avifDecoder * decoder)
if (!track->sampleTable) {
continue;
}
+ if (!track->id) { // trak box might be missing a tkhd box inside, skip it
+ continue;
+ }
if (!track->sampleTable->chunks.count) {
continue;
}
@@ -1993,12 +2087,28 @@ avifResult avifDecoderReset(avifDecoder * decoder)
}
colorTrack = &decoder->data->tracks.track[colorTrackIndex];
+ colorProperties = avifSampleTableGetProperties(colorTrack->sampleTable);
+ if (!colorProperties) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+
+ // Find Exif and/or XMP metadata, if any
+ if (colorTrack->meta) {
+ // See the comment above avifDecoderDataFindMetadata() for the explanation of using 0 here
+ if (!avifDecoderDataFindMetadata(data, colorTrack->meta, decoder->image, 0)) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+ }
+
uint32_t alphaTrackIndex = 0;
for (; alphaTrackIndex < decoder->data->tracks.count; ++alphaTrackIndex) {
avifTrack * track = &decoder->data->tracks.track[alphaTrackIndex];
if (!track->sampleTable) {
continue;
}
+ if (!track->id) {
+ continue;
+ }
if (!track->sampleTable->chunks.count) {
continue;
}
@@ -2045,24 +2155,22 @@ avifResult avifDecoderReset(avifDecoder * decoder)
}
memset(&decoder->imageTiming, 0, sizeof(decoder->imageTiming)); // to be set in avifDecoderNextImage()
- decoder->containerWidth = colorTrack->width;
- decoder->containerHeight = colorTrack->height;
- decoder->containerDepth = avifSampleTableGetDepth(colorTrack->sampleTable);
+ decoder->image->width = colorTrack->width;
+ decoder->image->height = colorTrack->height;
+ decoder->alphaPresent = (alphaTrack != NULL);
} else {
// Create from items
avifROData colorOBU = AVIF_DATA_EMPTY;
avifROData alphaOBU = AVIF_DATA_EMPTY;
- avifROData exifData = AVIF_DATA_EMPTY;
- avifROData xmpData = AVIF_DATA_EMPTY;
avifDecoderItem * colorOBUItem = NULL;
avifDecoderItem * alphaOBUItem = NULL;
// Find the colorOBU (primary) item
- for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
- avifDecoderItem * item = &data->items.item[itemIndex];
- if (!item->id || !item->size) {
- break;
+ for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
+ avifDecoderItem * item = &data->meta->items.item[itemIndex];
+ if (!item->size) {
+ continue;
}
if (item->hasUnsupportedEssentialProperty) {
// An essential property isn't supported by libavif; ignore the item.
@@ -2077,12 +2185,15 @@ avifResult avifDecoderReset(avifDecoder * decoder)
// It's a thumbnail, skip it
continue;
}
- if ((data->primaryItemID > 0) && (item->id != data->primaryItemID)) {
+ if ((data->meta->primaryItemID > 0) && (item->id != data->meta->primaryItemID)) {
// a primary item ID was specified, require it
continue;
}
if (isGrid) {
+ if (decoder->disableGridImages) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
const uint8_t * itemPtr = avifDecoderDataCalcItemPtr(data, item);
if (itemPtr == NULL) {
return AVIF_RESULT_BMFF_PARSE_FAILED;
@@ -2102,12 +2213,13 @@ avifResult avifDecoderReset(avifDecoder * decoder)
if (!colorOBUItem) {
return AVIF_RESULT_NO_AV1_ITEMS_FOUND;
}
+ colorProperties = &colorOBUItem->properties;
// Find the alphaOBU item, if any
- for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
- avifDecoderItem * item = &data->items.item[itemIndex];
- if (!item->id || !item->size) {
- break;
+ for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
+ avifDecoderItem * item = &data->meta->items.item[itemIndex];
+ if (!item->size) {
+ continue;
}
if (item->hasUnsupportedEssentialProperty) {
// An essential property isn't supported by libavif; ignore the item.
@@ -2123,8 +2235,12 @@ avifResult avifDecoderReset(avifDecoder * decoder)
continue;
}
- if (isAlphaURN(item->auxC.auxType) && (item->auxForID == colorOBUItem->id)) {
+ const avifProperty * auxCProp = avifPropertyArrayFind(&item->properties, "auxC");
+ if (auxCProp && isAlphaURN(auxCProp->u.auxC.auxType) && (item->auxForID == colorOBUItem->id)) {
if (isGrid) {
+ if (decoder->disableGridImages) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
const uint8_t * itemPtr = avifDecoderDataCalcItemPtr(data, item);
if (itemPtr == NULL) {
return AVIF_RESULT_BMFF_PARSE_FAILED;
@@ -2143,36 +2259,8 @@ avifResult avifDecoderReset(avifDecoder * decoder)
}
// Find Exif and/or XMP metadata, if any
- for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
- avifDecoderItem * item = &data->items.item[itemIndex];
- if (!item->id || !item->size) {
- break;
- }
- if (item->hasUnsupportedEssentialProperty) {
- // An essential property isn't supported by libavif; ignore the item.
- continue;
- }
-
- if (item->descForID != colorOBUItem->id) {
- // Not a content description (metadata) for the colorOBU, skip it
- continue;
- }
-
- if (!memcmp(item->type, "Exif", 4)) {
- // Advance past Annex A.2.1's header
- const uint8_t * boxPtr = avifDecoderDataCalcItemPtr(data, item);
- BEGIN_STREAM(exifBoxStream, boxPtr, item->size);
- uint32_t exifTiffHeaderOffset;
- CHECK(avifROStreamReadU32(&exifBoxStream, &exifTiffHeaderOffset)); // unsigned int(32) exif_tiff_header_offset;
-
- exifData.data = avifROStreamCurrent(&exifBoxStream);
- exifData.size = avifROStreamRemainingBytes(&exifBoxStream);
- }
-
- if (!memcmp(item->type, "mime", 4) && !memcmp(item->contentType.contentType, xmpContentType, xmpContentTypeSize)) {
- xmpData.data = avifDecoderDataCalcItemPtr(data, item);
- xmpData.size = item->size;
- }
+ if (!avifDecoderDataFindMetadata(data, data->meta, decoder->image, colorOBUItem->id)) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
}
if ((data->colorGrid.rows > 0) && (data->colorGrid.columns > 0)) {
@@ -2186,14 +2274,14 @@ avifResult avifDecoderReset(avifDecoder * decoder)
}
avifTile * colorTile = avifDecoderDataCreateTile(decoder->data);
- avifSample * colorSample = (avifSample *)avifArrayPushPtr(&colorTile->input->samples);
+ avifDecodeSample * colorSample = (avifDecodeSample *)avifArrayPushPtr(&colorTile->input->samples);
memcpy(&colorSample->data, &colorOBU, sizeof(avifROData));
colorSample->sync = AVIF_TRUE;
decoder->data->colorTileCount = 1;
}
if ((data->alphaGrid.rows > 0) && (data->alphaGrid.columns > 0) && alphaOBUItem) {
- if (!avifDecoderDataGenerateImageGridTiles(data, &data->alphaGrid, alphaOBUItem, AVIF_FALSE)) {
+ if (!avifDecoderDataGenerateImageGridTiles(data, &data->alphaGrid, alphaOBUItem, AVIF_TRUE)) {
return AVIF_RESULT_INVALID_IMAGE_GRID;
}
data->alphaTileCount = data->tiles.count - data->colorTileCount;
@@ -2202,7 +2290,7 @@ avifResult avifDecoderReset(avifDecoder * decoder)
if (alphaOBU.size > 0) {
alphaTile = avifDecoderDataCreateTile(decoder->data);
- avifSample * alphaSample = (avifSample *)avifArrayPushPtr(&alphaTile->input->samples);
+ avifDecodeSample * alphaSample = (avifDecodeSample *)avifArrayPushPtr(&alphaTile->input->samples);
memcpy(&alphaSample->data, &alphaOBU, sizeof(avifROData));
alphaSample->sync = AVIF_TRUE;
alphaTile->input->alpha = AVIF_TRUE;
@@ -2210,43 +2298,6 @@ avifResult avifDecoderReset(avifDecoder * decoder)
}
}
- if (colorOBUItem->colrPresent) {
- if (colorOBUItem->colr.hasICC) {
- avifImageSetProfileICC(decoder->image, colorOBUItem->colr.icc, colorOBUItem->colr.iccSize);
- } else if (colorOBUItem->colr.hasNCLX) {
- data->cicpSet = AVIF_TRUE;
- decoder->image->colorPrimaries = colorOBUItem->colr.colorPrimaries;
- decoder->image->transferCharacteristics = colorOBUItem->colr.transferCharacteristics;
- decoder->image->matrixCoefficients = colorOBUItem->colr.matrixCoefficients;
- decoder->image->yuvRange = colorOBUItem->colr.range;
- }
- }
-
- // Transformations
- if (colorOBUItem->paspPresent) {
- decoder->image->transformFlags |= AVIF_TRANSFORM_PASP;
- memcpy(&decoder->image->pasp, &colorOBUItem->pasp, sizeof(avifPixelAspectRatioBox));
- }
- if (colorOBUItem->clapPresent) {
- decoder->image->transformFlags |= AVIF_TRANSFORM_CLAP;
- memcpy(&decoder->image->clap, &colorOBUItem->clap, sizeof(avifCleanApertureBox));
- }
- if (colorOBUItem->irotPresent) {
- decoder->image->transformFlags |= AVIF_TRANSFORM_IROT;
- memcpy(&decoder->image->irot, &colorOBUItem->irot, sizeof(avifImageRotation));
- }
- if (colorOBUItem->imirPresent) {
- decoder->image->transformFlags |= AVIF_TRANSFORM_IMIR;
- memcpy(&decoder->image->imir, &colorOBUItem->imir, sizeof(avifImageMirror));
- }
-
- if (exifData.data && exifData.size) {
- avifImageSetMetadataExif(decoder->image, exifData.data, exifData.size);
- }
- if (xmpData.data && xmpData.size) {
- avifImageSetMetadataXMP(decoder->image, xmpData.data, xmpData.size);
- }
-
// Set all counts and timing to safe-but-uninteresting values
decoder->imageIndex = -1;
decoder->imageCount = 1;
@@ -2262,18 +2313,98 @@ avifResult avifDecoderReset(avifDecoder * decoder)
decoder->ioStats.colorOBUSize = colorOBU.size;
decoder->ioStats.alphaOBUSize = alphaOBU.size;
- if (colorOBUItem->ispePresent) {
- decoder->containerWidth = colorOBUItem->ispe.width;
- decoder->containerHeight = colorOBUItem->ispe.height;
+ const avifProperty * ispeProp = avifPropertyArrayFind(colorProperties, "ispe");
+ if (ispeProp) {
+ decoder->image->width = ispeProp->u.ispe.width;
+ decoder->image->height = ispeProp->u.ispe.height;
} else {
- decoder->containerWidth = 0;
- decoder->containerHeight = 0;
+ decoder->image->width = 0;
+ decoder->image->height = 0;
}
- if (colorOBUItem->av1CPresent) {
- decoder->containerDepth = avifCodecConfigurationBoxGetDepth(&colorOBUItem->av1C);
+ decoder->alphaPresent = (alphaOBUItem != NULL);
+ }
+
+ // Sanity check tiles
+ for (uint32_t tileIndex = 0; tileIndex < data->tiles.count; ++tileIndex) {
+ avifTile * tile = &data->tiles.tile[tileIndex];
+ for (uint32_t sampleIndex = 0; sampleIndex < tile->input->samples.count; ++sampleIndex) {
+ avifDecodeSample * sample = &tile->input->samples.sample[sampleIndex];
+ if (!sample->data.data || !sample->data.size) {
+ // Every sample must have some data
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+ }
+ }
+
+ const avifProperty * colrProp = avifPropertyArrayFind(colorProperties, "colr");
+ if (colrProp) {
+ if (colrProp->u.colr.hasICC) {
+ avifImageSetProfileICC(decoder->image, colrProp->u.colr.icc, colrProp->u.colr.iccSize);
+ } else if (colrProp->u.colr.hasNCLX) {
+ data->cicpSet = AVIF_TRUE;
+ decoder->image->colorPrimaries = colrProp->u.colr.colorPrimaries;
+ decoder->image->transferCharacteristics = colrProp->u.colr.transferCharacteristics;
+ decoder->image->matrixCoefficients = colrProp->u.colr.matrixCoefficients;
+ decoder->image->yuvRange = colrProp->u.colr.range;
+ }
+ }
+
+ // Transformations
+ const avifProperty * paspProp = avifPropertyArrayFind(colorProperties, "pasp");
+ if (paspProp) {
+ decoder->image->transformFlags |= AVIF_TRANSFORM_PASP;
+ memcpy(&decoder->image->pasp, &paspProp->u.pasp, sizeof(avifPixelAspectRatioBox));
+ }
+ const avifProperty * clapProp = avifPropertyArrayFind(colorProperties, "clap");
+ if (clapProp) {
+ decoder->image->transformFlags |= AVIF_TRANSFORM_CLAP;
+ memcpy(&decoder->image->clap, &clapProp->u.clap, sizeof(avifCleanApertureBox));
+ }
+ const avifProperty * irotProp = avifPropertyArrayFind(colorProperties, "irot");
+ if (irotProp) {
+ decoder->image->transformFlags |= AVIF_TRANSFORM_IROT;
+ memcpy(&decoder->image->irot, &irotProp->u.irot, sizeof(avifImageRotation));
+ }
+ const avifProperty * imirProp = avifPropertyArrayFind(colorProperties, "imir");
+ if (imirProp) {
+ decoder->image->transformFlags |= AVIF_TRANSFORM_IMIR;
+ memcpy(&decoder->image->imir, &imirProp->u.imir, sizeof(avifImageMirror));
+ }
+
+ if (!decoder->data->cicpSet && (data->tiles.count > 0)) {
+ avifTile * firstTile = &data->tiles.tile[0];
+ if (firstTile->input->samples.count > 0) {
+ avifDecodeSample * sample = &firstTile->input->samples.sample[0];
+ avifSequenceHeader sequenceHeader;
+ if (avifSequenceHeaderParse(&sequenceHeader, &sample->data)) {
+ decoder->data->cicpSet = AVIF_TRUE;
+ decoder->image->colorPrimaries = sequenceHeader.colorPrimaries;
+ decoder->image->transferCharacteristics = sequenceHeader.transferCharacteristics;
+ decoder->image->matrixCoefficients = sequenceHeader.matrixCoefficients;
+ decoder->image->yuvRange = sequenceHeader.range;
+ }
+ }
+ }
+
+ const avifProperty * av1CProp = avifPropertyArrayFind(colorProperties, "av1C");
+ if (av1CProp) {
+ decoder->image->depth = avifCodecConfigurationBoxGetDepth(&av1CProp->u.av1C);
+ if (av1CProp->u.av1C.monochrome) {
+ decoder->image->yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
} else {
- decoder->containerDepth = 0;
+ if (av1CProp->u.av1C.chromaSubsamplingX && av1CProp->u.av1C.chromaSubsamplingY) {
+ decoder->image->yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
+ } else if (av1CProp->u.av1C.chromaSubsamplingX) {
+ decoder->image->yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
+
+ } else {
+ decoder->image->yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
+ }
}
+ decoder->image->yuvChromaSamplePosition = (avifChromaSamplePosition)av1CProp->u.av1C.chromaSamplePosition;
+ } else {
+ // An av1C box is mandatory in all valid AVIF configurations. Bail out.
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
}
return avifDecoderFlush(decoder);
@@ -2323,14 +2454,17 @@ avifResult avifDecoderNextImage(avifDecoder * decoder)
decoder->image->width = srcColor->width;
decoder->image->height = srcColor->height;
decoder->image->depth = srcColor->depth;
+ }
- if (!decoder->data->cicpSet) {
- decoder->data->cicpSet = AVIF_TRUE;
- decoder->image->colorPrimaries = srcColor->colorPrimaries;
- decoder->image->transferCharacteristics = srcColor->transferCharacteristics;
- decoder->image->matrixCoefficients = srcColor->matrixCoefficients;
- }
+#if 0
+ // This code is currently unnecessary as the CICP is always set by the end of avifDecoderParse().
+ if (!decoder->data->cicpSet) {
+ decoder->data->cicpSet = AVIF_TRUE;
+ decoder->image->colorPrimaries = srcColor->colorPrimaries;
+ decoder->image->transferCharacteristics = srcColor->transferCharacteristics;
+ decoder->image->matrixCoefficients = srcColor->matrixCoefficients;
}
+#endif
avifImageStealPlanes(decoder->image, srcColor, AVIF_PLANES_YUV);
}
@@ -2372,7 +2506,7 @@ avifResult avifDecoderNextImage(avifDecoder * decoder)
return AVIF_RESULT_OK;
}
-avifResult avifDecoderNthImageTiming(avifDecoder * decoder, uint32_t frameIndex, avifImageTiming * outTiming)
+avifResult avifDecoderNthImageTiming(const avifDecoder * decoder, uint32_t frameIndex, avifImageTiming * outTiming)
{
if (!decoder->data) {
// Nothing has been parsed yet
@@ -2442,7 +2576,7 @@ avifResult avifDecoderNthImage(avifDecoder * decoder, uint32_t frameIndex)
return AVIF_RESULT_OK;
}
-avifBool avifDecoderIsKeyframe(avifDecoder * decoder, uint32_t frameIndex)
+avifBool avifDecoderIsKeyframe(const avifDecoder * decoder, uint32_t frameIndex)
{
if ((decoder->data->tiles.count > 0) && decoder->data->tiles.tile[0].input) {
if (frameIndex < decoder->data->tiles.tile[0].input->samples.count) {
@@ -2452,7 +2586,7 @@ avifBool avifDecoderIsKeyframe(avifDecoder * decoder, uint32_t frameIndex)
return AVIF_FALSE;
}
-uint32_t avifDecoderNearestKeyframe(avifDecoder * decoder, uint32_t frameIndex)
+uint32_t avifDecoderNearestKeyframe(const avifDecoder * decoder, uint32_t frameIndex)
{
for (; frameIndex != 0; --frameIndex) {
if (avifDecoderIsKeyframe(decoder, frameIndex)) {
@@ -2462,7 +2596,7 @@ uint32_t avifDecoderNearestKeyframe(avifDecoder * decoder, uint32_t frameIndex)
return frameIndex;
}
-avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, avifROData * input)
+avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, const avifROData * input)
{
avifResult result = avifDecoderParse(decoder, input);
if (result != AVIF_RESULT_OK) {
@@ -2472,6 +2606,6 @@ avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, avifROData
if (result != AVIF_RESULT_OK) {
return result;
}
- avifImageCopy(image, decoder->image);
+ avifImageCopy(image, decoder->image, AVIF_PLANES_ALL);
return AVIF_RESULT_OK;
}
diff --git a/chromium/third_party/libavif/src/src/reformat.c b/chromium/third_party/libavif/src/src/reformat.c
index 616f6a12061..ee1906562b0 100644
--- a/chromium/third_party/libavif/src/src/reformat.c
+++ b/chromium/third_party/libavif/src/src/reformat.c
@@ -13,7 +13,7 @@ struct YUVBlock
float v;
};
-avifBool avifPrepareReformatState(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+avifBool avifPrepareReformatState(const avifImage * image, const avifRGBImage * rgb, avifReformatState * state)
{
if ((image->depth != 8) && (image->depth != 10) && (image->depth != 12)) {
return AVIF_FALSE;
@@ -122,7 +122,7 @@ static int yuvToUNorm(int chan, avifRange range, int depth, float maxChannel, fl
return unorm;
}
-avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
+avifResult avifImageRGBToYUV(avifImage * image, const avifRGBImage * rgb)
{
if (!rgb->pixels) {
return AVIF_RESULT_REFORMAT_FAILED;
@@ -134,7 +134,7 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
}
avifImageAllocatePlanes(image, AVIF_PLANES_YUV);
- if (avifRGBFormatHasAlpha(rgb->format)) {
+ if (avifRGBFormatHasAlpha(rgb->format) && !rgb->ignoreAlpha) {
avifImageAllocatePlanes(image, AVIF_PLANES_A);
}
@@ -198,7 +198,7 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
uint16_t * pY = (uint16_t *)&yuvPlanes[AVIF_CHAN_Y][(i * 2) + (j * yuvRowBytes[AVIF_CHAN_Y])];
*pY = (uint16_t)yuvToUNorm(
AVIF_CHAN_Y, image->yuvRange, image->depth, yuvMaxChannel, yuvBlock[bI][bJ].y, state.mode);
- if (!state.formatInfo.chromaShiftX && !state.formatInfo.chromaShiftY) {
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) {
// YUV444, full chroma
uint16_t * pU = (uint16_t *)&yuvPlanes[AVIF_CHAN_U][(i * 2) + (j * yuvRowBytes[AVIF_CHAN_U])];
*pU = (uint16_t)yuvToUNorm(
@@ -210,7 +210,7 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
} else {
yuvPlanes[AVIF_CHAN_Y][i + (j * yuvRowBytes[AVIF_CHAN_Y])] = (uint8_t)yuvToUNorm(
AVIF_CHAN_Y, image->yuvRange, image->depth, yuvMaxChannel, yuvBlock[bI][bJ].y, state.mode);
- if (!state.formatInfo.chromaShiftX && !state.formatInfo.chromaShiftY) {
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) {
// YUV444, full chroma
yuvPlanes[AVIF_CHAN_U][i + (j * yuvRowBytes[AVIF_CHAN_U])] = (uint8_t)yuvToUNorm(
AVIF_CHAN_U, image->yuvRange, image->depth, yuvMaxChannel, yuvBlock[bI][bJ].u, state.mode);
@@ -222,7 +222,7 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
}
// Populate any subsampled channels with averages from the 2x2 block
- if (state.formatInfo.chromaShiftX && state.formatInfo.chromaShiftY) {
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420) {
// YUV420, average 4 samples (2x2)
float sumU = 0.0f;
@@ -237,8 +237,10 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
float avgU = sumU / totalSamples;
float avgV = sumV / totalSamples;
- int uvI = outerI >> state.formatInfo.chromaShiftX;
- int uvJ = outerJ >> state.formatInfo.chromaShiftY;
+ const int chromaShiftX = 1;
+ const int chromaShiftY = 1;
+ int uvI = outerI >> chromaShiftX;
+ int uvJ = outerJ >> chromaShiftY;
if (state.yuvChannelBytes > 1) {
uint16_t * pU = (uint16_t *)&yuvPlanes[AVIF_CHAN_U][(uvI * 2) + (uvJ * yuvRowBytes[AVIF_CHAN_U])];
*pU = (uint16_t)yuvToUNorm(AVIF_CHAN_U, image->yuvRange, image->depth, yuvMaxChannel, avgU, state.mode);
@@ -250,7 +252,7 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
yuvPlanes[AVIF_CHAN_V][uvI + (uvJ * yuvRowBytes[AVIF_CHAN_V])] =
(uint8_t)yuvToUNorm(AVIF_CHAN_V, image->yuvRange, image->depth, yuvMaxChannel, avgV, state.mode);
}
- } else if (state.formatInfo.chromaShiftX && !state.formatInfo.chromaShiftY) {
+ } else if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV422) {
// YUV422, average 2 samples (1x2), twice
for (int bJ = 0; bJ < blockH; ++bJ) {
@@ -264,7 +266,8 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
float avgU = sumU / totalSamples;
float avgV = sumV / totalSamples;
- int uvI = outerI >> state.formatInfo.chromaShiftX;
+ const int chromaShiftX = 1;
+ int uvI = outerI >> chromaShiftX;
int uvJ = outerJ + bJ;
if (state.yuvChannelBytes > 1) {
uint16_t * pU = (uint16_t *)&yuvPlanes[AVIF_CHAN_U][(uvI * 2) + (uvJ * yuvRowBytes[AVIF_CHAN_U])];
@@ -294,7 +297,7 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
params.dstOffsetBytes = 0;
params.dstPixelBytes = state.yuvChannelBytes;
- if (avifRGBFormatHasAlpha(rgb->format)) {
+ if (avifRGBFormatHasAlpha(rgb->format) && !rgb->ignoreAlpha) {
params.srcDepth = rgb->depth;
params.srcRange = AVIF_RANGE_FULL;
params.srcPlane = rgb->pixels;
@@ -310,83 +313,199 @@ avifResult avifImageRGBToYUV(avifImage * image, avifRGBImage * rgb)
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUVAnyToRGBAnySlow(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUVAnyToRGBAnySlow(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
+ // Aliases for some state
const float kr = state->kr;
const float kg = state->kg;
const float kb = state->kb;
- const uint32_t rgbPixelBytes = state->rgbPixelBytes;
const float * const unormFloatTableY = state->unormFloatTableY;
const float * const unormFloatTableUV = state->unormFloatTableUV;
- const uint32_t maxUVI = ((image->width + state->formatInfo.chromaShiftX) >> state->formatInfo.chromaShiftX) - 1;
- const uint32_t maxUVJ = ((image->height + state->formatInfo.chromaShiftY) >> state->formatInfo.chromaShiftY) - 1;
- const avifBool hasColor = (image->yuvPlanes[AVIF_CHAN_U] && image->yuvPlanes[AVIF_CHAN_V]);
+ const uint32_t yuvChannelBytes = state->yuvChannelBytes;
+ const uint32_t rgbPixelBytes = state->rgbPixelBytes;
+
+ // Aliases for plane data
+ const uint8_t * yPlane = image->yuvPlanes[AVIF_CHAN_Y];
+ const uint8_t * uPlane = image->yuvPlanes[AVIF_CHAN_U];
+ const uint8_t * vPlane = image->yuvPlanes[AVIF_CHAN_V];
+ const uint32_t yRowBytes = image->yuvRowBytes[AVIF_CHAN_Y];
+ const uint32_t uRowBytes = image->yuvRowBytes[AVIF_CHAN_U];
+ const uint32_t vRowBytes = image->yuvRowBytes[AVIF_CHAN_V];
+ // Various observations and limits
+ const avifBool hasColor = (uPlane && vPlane && (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV400));
const uint16_t yuvMaxChannel = (uint16_t)((1 << image->depth) - 1);
const float rgbMaxChannel = (float)((1 << rgb->depth) - 1);
+
for (uint32_t j = 0; j < image->height; ++j) {
- const uint32_t uvJ = AVIF_MIN(j >> state->formatInfo.chromaShiftY, maxUVJ);
- uint8_t * ptrY8 = &image->yuvPlanes[AVIF_CHAN_Y][(j * image->yuvRowBytes[AVIF_CHAN_Y])];
- uint8_t * ptrU8 = NULL;
- uint8_t * ptrV8 = NULL;
- if (hasColor) {
- ptrU8 = &image->yuvPlanes[AVIF_CHAN_U][(uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
- ptrV8 = &image->yuvPlanes[AVIF_CHAN_V][(uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
- }
- uint16_t * ptrY16 = (uint16_t *)ptrY8;
- uint16_t * ptrU16 = (uint16_t *)ptrU8;
- uint16_t * ptrV16 = (uint16_t *)ptrV8;
+ const uint32_t uvJ = j >> state->formatInfo.chromaShiftY;
+ const uint8_t * ptrY8 = &yPlane[j * yRowBytes];
+ const uint8_t * ptrU8 = &uPlane[(uvJ * uRowBytes)];
+ const uint8_t * ptrV8 = &vPlane[(uvJ * vRowBytes)];
+ const uint16_t * ptrY16 = (const uint16_t *)ptrY8;
+ const uint16_t * ptrU16 = (const uint16_t *)ptrU8;
+ const uint16_t * ptrV16 = (const uint16_t *)ptrV8;
+
uint8_t * ptrR = &rgb->pixels[state->rgbOffsetBytesR + (j * rgb->rowBytes)];
uint8_t * ptrG = &rgb->pixels[state->rgbOffsetBytesG + (j * rgb->rowBytes)];
uint8_t * ptrB = &rgb->pixels[state->rgbOffsetBytesB + (j * rgb->rowBytes)];
for (uint32_t i = 0; i < image->width; ++i) {
- uint32_t uvI = AVIF_MIN(i >> state->formatInfo.chromaShiftX, maxUVI);
- uint16_t unormY, unormU, unormV;
+ uint32_t uvI = i >> state->formatInfo.chromaShiftX;
+ float Y, Cb = 0.5f, Cr = 0.5f;
- // clamp incoming data to protect against bad LUT lookups
+ // Calculate Y
+ uint16_t unormY;
if (image->depth == 8) {
- unormY = (uint16_t)AVIF_MIN(ptrY8[i], yuvMaxChannel);
- if (hasColor) {
- unormU = (uint16_t)AVIF_MIN(ptrU8[uvI], yuvMaxChannel);
- unormV = (uint16_t)AVIF_MIN(ptrV8[uvI], yuvMaxChannel);
- } else {
- unormU = 0;
- unormV = 0;
- }
+ unormY = ptrY8[i];
} else {
+ // clamp incoming data to protect against bad LUT lookups
unormY = AVIF_MIN(ptrY16[i], yuvMaxChannel);
- if (hasColor) {
- unormU = AVIF_MIN(ptrU16[uvI], yuvMaxChannel);
- unormV = AVIF_MIN(ptrV16[uvI], yuvMaxChannel);
+ }
+ Y = unormFloatTableY[unormY];
+
+ // Calculate Cb and Cr
+ if (hasColor) {
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) {
+ uint16_t unormU, unormV;
+
+ if (image->depth == 8) {
+ unormU = ptrU8[uvI];
+ unormV = ptrV8[uvI];
+ } else {
+ // clamp incoming data to protect against bad LUT lookups
+ unormU = AVIF_MIN(ptrU16[uvI], yuvMaxChannel);
+ unormV = AVIF_MIN(ptrV16[uvI], yuvMaxChannel);
+ }
+
+ Cb = unormFloatTableUV[unormU];
+ Cr = unormFloatTableUV[unormV];
} else {
- unormU = 0;
- unormV = 0;
+ // Upsample to 444:
+ //
+ // * * * *
+ // A B
+ // * 1 2 *
+ //
+ // * 3 4 *
+ // C D
+ // * * * *
+ //
+ // When converting from YUV420 to RGB, for any given "high-resolution" RGB
+ // coordinate (1,2,3,4,*), there are up to four "low-resolution" UV samples
+ // (A,B,C,D) that are "nearest" to the pixel. For RGB pixel #1, A is the closest
+ // UV sample, B and C are "adjacent" to it on the same row and column, and D is
+ // the diagonal. For RGB pixel 3, C is the closest UV sample, A and D are
+ // adjacent, and B is the diagonal. Sometimes the adjacent pixel on the same row
+ // is to the left or right, and sometimes the adjacent pixel on the same column
+ // is up or down. For any edge or corner, there might be only one or two
+ // samples nearby, so they'll be duplicated.
+ //
+ // The following code attempts to find all four nearest UV samples and put them
+ // in the unormU and unormV grids as follows:
+ //
+ // unorm[0][0] = closest ( weights: bilinear: 9/16, nearest: 1 )
+ // unorm[1][0] = adjacent col ( weights: bilinear: 3/16, nearest: 0 )
+ // unorm[0][1] = adjacent row ( weights: bilinear: 3/16, nearest: 0 )
+ // unorm[1][1] = diagonal ( weights: bilinear: 1/16, nearest: 0 )
+ //
+ // It then weights them according to the requested upsampling set in avifRGBImage.
+
+ uint16_t unormU[2][2], unormV[2][2];
+
+ // How many bytes to add to a uint8_t pointer index to get to the adjacent (lesser) sample in a given direction
+ int uAdjCol, vAdjCol, uAdjRow, vAdjRow;
+ if ((i == 0) || ((i == (image->width - 1)) && ((i % 2) != 0))) {
+ uAdjCol = 0;
+ vAdjCol = 0;
+ } else {
+ if ((i % 2) != 0) {
+ uAdjCol = yuvChannelBytes;
+ vAdjCol = yuvChannelBytes;
+ } else {
+ uAdjCol = -1 * yuvChannelBytes;
+ vAdjCol = -1 * yuvChannelBytes;
+ }
+ }
+
+ // For YUV422, uvJ will always be a fresh value (always corresponds to j), so
+ // we'll simply duplicate the sample as if we were on the top or bottom row and
+ // it'll behave as plain old linear (1D) upsampling, which is all we want.
+ if ((j == 0) || ((j == (image->height - 1)) && ((j % 2) != 0)) || (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV422)) {
+ uAdjRow = 0;
+ vAdjRow = 0;
+ } else {
+ if ((j % 2) != 0) {
+ uAdjRow = (int)uRowBytes;
+ vAdjRow = (int)vRowBytes;
+ } else {
+ uAdjRow = -1 * (int)uRowBytes;
+ vAdjRow = -1 * (int)vRowBytes;
+ }
+ }
+
+ if (image->depth == 8) {
+ unormU[0][0] = uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes)];
+ unormV[0][0] = vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes)];
+ unormU[1][0] = uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes) + uAdjCol];
+ unormV[1][0] = vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes) + vAdjCol];
+ unormU[0][1] = uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes) + uAdjRow];
+ unormV[0][1] = vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes) + vAdjRow];
+ unormU[1][1] = uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes) + uAdjCol + uAdjRow];
+ unormV[1][1] = vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes) + vAdjCol + vAdjRow];
+ } else {
+ unormU[0][0] = *((const uint16_t *)&uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes)]);
+ unormV[0][0] = *((const uint16_t *)&vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes)]);
+ unormU[1][0] = *((const uint16_t *)&uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes) + uAdjCol]);
+ unormV[1][0] = *((const uint16_t *)&vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes) + vAdjCol]);
+ unormU[0][1] = *((const uint16_t *)&uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes) + uAdjRow]);
+ unormV[0][1] = *((const uint16_t *)&vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes) + vAdjRow]);
+ unormU[1][1] = *((const uint16_t *)&uPlane[(uvJ * uRowBytes) + (uvI * yuvChannelBytes) + uAdjCol + uAdjRow]);
+ unormV[1][1] = *((const uint16_t *)&vPlane[(uvJ * vRowBytes) + (uvI * yuvChannelBytes) + vAdjCol + vAdjRow]);
+
+ // clamp incoming data to protect against bad LUT lookups
+ for (int bJ = 0; bJ < 2; ++bJ) {
+ for (int bI = 0; bI < 2; ++bI) {
+ unormU[bI][bJ] = AVIF_MIN(unormU[bI][bJ], yuvMaxChannel);
+ unormV[bI][bJ] = AVIF_MIN(unormV[bI][bJ], yuvMaxChannel);
+ }
+ }
+ }
+
+ if (rgb->chromaUpsampling == AVIF_CHROMA_UPSAMPLING_BILINEAR) {
+ // Bilinear filtering with weights
+ Cb = (unormFloatTableUV[unormU[0][0]] * (9.0f / 16.0f)) + (unormFloatTableUV[unormU[1][0]] * (3.0f / 16.0f)) +
+ (unormFloatTableUV[unormU[0][1]] * (3.0f / 16.0f)) + (unormFloatTableUV[unormU[1][1]] * (1.0f / 16.0f));
+ Cr = (unormFloatTableUV[unormV[0][0]] * (9.0f / 16.0f)) + (unormFloatTableUV[unormV[1][0]] * (3.0f / 16.0f)) +
+ (unormFloatTableUV[unormV[0][1]] * (3.0f / 16.0f)) + (unormFloatTableUV[unormV[1][1]] * (1.0f / 16.0f));
+ } else {
+ // Nearest neighbor; ignore all UVs but the closest one
+ Cb = unormFloatTableUV[unormU[0][0]];
+ Cr = unormFloatTableUV[unormV[0][0]];
+ }
}
}
- // Convert unorm to float
- const float Y = unormFloatTableY[unormY];
- const float Cb = unormFloatTableUV[unormU];
- const float Cr = unormFloatTableUV[unormV];
-
float R, G, B;
- if (state->mode == AVIF_REFORMAT_MODE_IDENTITY) {
- // Formulas 41,42,43 from https://www.itu.int/rec/T-REC-H.273-201612-I/en
- if (hasColor) {
+ if (hasColor) {
+ if (state->mode == AVIF_REFORMAT_MODE_IDENTITY) {
+ // Identity (GBR): Formulas 41,42,43 from https://www.itu.int/rec/T-REC-H.273-201612-I/en
G = Y;
B = Cb;
R = Cr;
} else {
- G = Y;
- B = Y;
- R = Y;
+ // Normal YUV
+ R = Y + (2 * (1 - kr)) * Cr;
+ B = Y + (2 * (1 - kb)) * Cb;
+ G = Y - ((2 * ((kr * (1 - kr) * Cr) + (kb * (1 - kb) * Cb))) / kg);
}
} else {
- R = Y + (2 * (1 - kr)) * Cr;
- B = Y + (2 * (1 - kb)) * Cb;
- G = Y - ((2 * ((kr * (1 - kr) * Cr) + (kb * (1 - kb) * Cb))) / kg);
+ // Monochrome: just populate all channels with luma (identity mode is irrelevant)
+ R = Y;
+ G = Y;
+ B = Y;
}
+
const float Rc = AVIF_CLAMP(R, 0.0f, 1.0f);
const float Gc = AVIF_CLAMP(G, 0.0f, 1.0f);
const float Bc = AVIF_CLAMP(B, 0.0f, 1.0f);
@@ -408,7 +527,7 @@ static avifResult avifImageYUVAnyToRGBAnySlow(avifImage * image, avifRGBImage *
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV16ToRGB16Color(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV16ToRGB16Color(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -416,13 +535,11 @@ static avifResult avifImageYUV16ToRGB16Color(avifImage * image, avifRGBImage * r
const uint32_t rgbPixelBytes = state->rgbPixelBytes;
const float * const unormFloatTableY = state->unormFloatTableY;
const float * const unormFloatTableUV = state->unormFloatTableUV;
- const uint32_t maxUVI = ((image->width + state->formatInfo.chromaShiftX) >> state->formatInfo.chromaShiftX) - 1;
- const uint32_t maxUVJ = ((image->height + state->formatInfo.chromaShiftY) >> state->formatInfo.chromaShiftY) - 1;
const uint16_t yuvMaxChannel = (uint16_t)((1 << image->depth) - 1);
const float rgbMaxChannel = (float)((1 << rgb->depth) - 1);
for (uint32_t j = 0; j < image->height; ++j) {
- const uint32_t uvJ = AVIF_MIN(j >> state->formatInfo.chromaShiftY, maxUVJ);
+ const uint32_t uvJ = j >> state->formatInfo.chromaShiftY;
const uint16_t * const ptrY = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_Y][(j * image->yuvRowBytes[AVIF_CHAN_Y])];
const uint16_t * const ptrU = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_U][(uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
const uint16_t * const ptrV = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_V][(uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
@@ -431,7 +548,7 @@ static avifResult avifImageYUV16ToRGB16Color(avifImage * image, avifRGBImage * r
uint8_t * ptrB = &rgb->pixels[state->rgbOffsetBytesB + (j * rgb->rowBytes)];
for (uint32_t i = 0; i < image->width; ++i) {
- uint32_t uvI = AVIF_MIN(i >> state->formatInfo.chromaShiftX, maxUVI);
+ uint32_t uvI = i >> state->formatInfo.chromaShiftX;
// clamp incoming data to protect against bad LUT lookups
const uint16_t unormY = AVIF_MIN(ptrY[i], yuvMaxChannel);
@@ -462,7 +579,7 @@ static avifResult avifImageYUV16ToRGB16Color(avifImage * image, avifRGBImage * r
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV16ToRGB16Mono(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV16ToRGB16Mono(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -505,7 +622,8 @@ static avifResult avifImageYUV16ToRGB16Mono(avifImage * image, avifRGBImage * rg
}
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV16ToRGB8Color(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+
+static avifResult avifImageYUV16ToRGB8Color(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -513,13 +631,11 @@ static avifResult avifImageYUV16ToRGB8Color(avifImage * image, avifRGBImage * rg
const uint32_t rgbPixelBytes = state->rgbPixelBytes;
const float * const unormFloatTableY = state->unormFloatTableY;
const float * const unormFloatTableUV = state->unormFloatTableUV;
- const uint32_t maxUVI = ((image->width + state->formatInfo.chromaShiftX) >> state->formatInfo.chromaShiftX) - 1;
- const uint32_t maxUVJ = ((image->height + state->formatInfo.chromaShiftY) >> state->formatInfo.chromaShiftY) - 1;
const uint16_t yuvMaxChannel = (uint16_t)((1 << image->depth) - 1);
const float rgbMaxChannel = (float)((1 << rgb->depth) - 1);
for (uint32_t j = 0; j < image->height; ++j) {
- const uint32_t uvJ = AVIF_MIN(j >> state->formatInfo.chromaShiftY, maxUVJ);
+ const uint32_t uvJ = j >> state->formatInfo.chromaShiftY;
const uint16_t * const ptrY = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_Y][(j * image->yuvRowBytes[AVIF_CHAN_Y])];
const uint16_t * const ptrU = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_U][(uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
const uint16_t * const ptrV = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_V][(uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
@@ -528,7 +644,7 @@ static avifResult avifImageYUV16ToRGB8Color(avifImage * image, avifRGBImage * rg
uint8_t * ptrB = &rgb->pixels[state->rgbOffsetBytesB + (j * rgb->rowBytes)];
for (uint32_t i = 0; i < image->width; ++i) {
- uint32_t uvI = AVIF_MIN(i >> state->formatInfo.chromaShiftX, maxUVI);
+ uint32_t uvI = i >> state->formatInfo.chromaShiftX;
// clamp incoming data to protect against bad LUT lookups
const uint16_t unormY = AVIF_MIN(ptrY[i], yuvMaxChannel);
@@ -559,7 +675,7 @@ static avifResult avifImageYUV16ToRGB8Color(avifImage * image, avifRGBImage * rg
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV16ToRGB8Mono(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV16ToRGB8Mono(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -591,9 +707,9 @@ static avifResult avifImageYUV16ToRGB8Mono(avifImage * image, avifRGBImage * rgb
const float Gc = AVIF_CLAMP(G, 0.0f, 1.0f);
const float Bc = AVIF_CLAMP(B, 0.0f, 1.0f);
- *((uint16_t *)ptrR) = (uint16_t)(0.5f + (Rc * rgbMaxChannel));
- *((uint16_t *)ptrG) = (uint16_t)(0.5f + (Gc * rgbMaxChannel));
- *((uint16_t *)ptrB) = (uint16_t)(0.5f + (Bc * rgbMaxChannel));
+ *ptrR = (uint8_t)(0.5f + (Rc * rgbMaxChannel));
+ *ptrG = (uint8_t)(0.5f + (Gc * rgbMaxChannel));
+ *ptrB = (uint8_t)(0.5f + (Bc * rgbMaxChannel));
ptrR += rgbPixelBytes;
ptrG += rgbPixelBytes;
@@ -603,7 +719,7 @@ static avifResult avifImageYUV16ToRGB8Mono(avifImage * image, avifRGBImage * rgb
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV8ToRGB16Color(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV8ToRGB16Color(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -611,12 +727,10 @@ static avifResult avifImageYUV8ToRGB16Color(avifImage * image, avifRGBImage * rg
const uint32_t rgbPixelBytes = state->rgbPixelBytes;
const float * const unormFloatTableY = state->unormFloatTableY;
const float * const unormFloatTableUV = state->unormFloatTableUV;
- const uint32_t maxUVI = ((image->width + state->formatInfo.chromaShiftX) >> state->formatInfo.chromaShiftX) - 1;
- const uint32_t maxUVJ = ((image->height + state->formatInfo.chromaShiftY) >> state->formatInfo.chromaShiftY) - 1;
const float rgbMaxChannel = (float)((1 << rgb->depth) - 1);
for (uint32_t j = 0; j < image->height; ++j) {
- const uint32_t uvJ = AVIF_MIN(j >> state->formatInfo.chromaShiftY, maxUVJ);
+ const uint32_t uvJ = j >> state->formatInfo.chromaShiftY;
const uint8_t * const ptrY = &image->yuvPlanes[AVIF_CHAN_Y][(j * image->yuvRowBytes[AVIF_CHAN_Y])];
const uint8_t * const ptrU = &image->yuvPlanes[AVIF_CHAN_U][(uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
const uint8_t * const ptrV = &image->yuvPlanes[AVIF_CHAN_V][(uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
@@ -625,7 +739,7 @@ static avifResult avifImageYUV8ToRGB16Color(avifImage * image, avifRGBImage * rg
uint8_t * ptrB = &rgb->pixels[state->rgbOffsetBytesB + (j * rgb->rowBytes)];
for (uint32_t i = 0; i < image->width; ++i) {
- uint32_t uvI = AVIF_MIN(i >> state->formatInfo.chromaShiftX, maxUVI);
+ uint32_t uvI = i >> state->formatInfo.chromaShiftX;
// Convert unorm to float (no clamp necessary, the full uint8_t range is a legal lookup)
const float Y = unormFloatTableY[ptrY[i]];
@@ -651,7 +765,7 @@ static avifResult avifImageYUV8ToRGB16Color(avifImage * image, avifRGBImage * rg
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV8ToRGB16Mono(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV8ToRGB16Mono(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -679,9 +793,9 @@ static avifResult avifImageYUV8ToRGB16Mono(avifImage * image, avifRGBImage * rgb
const float Gc = AVIF_CLAMP(G, 0.0f, 1.0f);
const float Bc = AVIF_CLAMP(B, 0.0f, 1.0f);
- *ptrR = (uint8_t)(0.5f + (Rc * rgbMaxChannel));
- *ptrG = (uint8_t)(0.5f + (Gc * rgbMaxChannel));
- *ptrB = (uint8_t)(0.5f + (Bc * rgbMaxChannel));
+ *((uint16_t *)ptrR) = (uint16_t)(0.5f + (Rc * rgbMaxChannel));
+ *((uint16_t *)ptrG) = (uint16_t)(0.5f + (Gc * rgbMaxChannel));
+ *((uint16_t *)ptrB) = (uint16_t)(0.5f + (Bc * rgbMaxChannel));
ptrR += rgbPixelBytes;
ptrG += rgbPixelBytes;
@@ -691,7 +805,7 @@ static avifResult avifImageYUV8ToRGB16Mono(avifImage * image, avifRGBImage * rgb
return AVIF_RESULT_OK;
}
-static avifResult avifImageIdentity8ToRGB8ColorFullRange(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageIdentity8ToRGB8ColorFullRange(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const uint32_t rgbPixelBytes = state->rgbPixelBytes;
for (uint32_t j = 0; j < image->height; ++j) {
@@ -715,7 +829,7 @@ static avifResult avifImageIdentity8ToRGB8ColorFullRange(avifImage * image, avif
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV8ToRGB8Color(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV8ToRGB8Color(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -723,12 +837,10 @@ static avifResult avifImageYUV8ToRGB8Color(avifImage * image, avifRGBImage * rgb
const uint32_t rgbPixelBytes = state->rgbPixelBytes;
const float * const unormFloatTableY = state->unormFloatTableY;
const float * const unormFloatTableUV = state->unormFloatTableUV;
- const uint32_t maxUVI = ((image->width + state->formatInfo.chromaShiftX) >> state->formatInfo.chromaShiftX) - 1;
- const uint32_t maxUVJ = ((image->height + state->formatInfo.chromaShiftY) >> state->formatInfo.chromaShiftY) - 1;
const float rgbMaxChannel = (float)((1 << rgb->depth) - 1);
for (uint32_t j = 0; j < image->height; ++j) {
- const uint32_t uvJ = AVIF_MIN(j >> state->formatInfo.chromaShiftY, maxUVJ);
+ const uint32_t uvJ = j >> state->formatInfo.chromaShiftY;
const uint8_t * const ptrY = &image->yuvPlanes[AVIF_CHAN_Y][(j * image->yuvRowBytes[AVIF_CHAN_Y])];
const uint8_t * const ptrU = &image->yuvPlanes[AVIF_CHAN_U][(uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
const uint8_t * const ptrV = &image->yuvPlanes[AVIF_CHAN_V][(uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
@@ -737,7 +849,7 @@ static avifResult avifImageYUV8ToRGB8Color(avifImage * image, avifRGBImage * rgb
uint8_t * ptrB = &rgb->pixels[state->rgbOffsetBytesB + (j * rgb->rowBytes)];
for (uint32_t i = 0; i < image->width; ++i) {
- uint32_t uvI = AVIF_MIN(i >> state->formatInfo.chromaShiftX, maxUVI);
+ uint32_t uvI = i >> state->formatInfo.chromaShiftX;
// Convert unorm to float (no clamp necessary, the full uint8_t range is a legal lookup)
const float Y = unormFloatTableY[ptrY[i]];
@@ -763,7 +875,7 @@ static avifResult avifImageYUV8ToRGB8Color(avifImage * image, avifRGBImage * rgb
return AVIF_RESULT_OK;
}
-static avifResult avifImageYUV8ToRGB8Mono(avifImage * image, avifRGBImage * rgb, avifReformatState * state)
+static avifResult avifImageYUV8ToRGB8Mono(const avifImage * image, avifRGBImage * rgb, avifReformatState * state)
{
const float kr = state->kr;
const float kg = state->kg;
@@ -803,7 +915,7 @@ static avifResult avifImageYUV8ToRGB8Mono(avifImage * image, avifRGBImage * rgb,
return AVIF_RESULT_OK;
}
-avifResult avifImageYUVToRGB(avifImage * image, avifRGBImage * rgb)
+avifResult avifImageYUVToRGB(const avifImage * image, avifRGBImage * rgb)
{
if (!image->yuvPlanes[AVIF_CHAN_Y]) {
return AVIF_RESULT_REFORMAT_FAILED;
@@ -814,7 +926,7 @@ avifResult avifImageYUVToRGB(avifImage * image, avifRGBImage * rgb)
return AVIF_RESULT_REFORMAT_FAILED;
}
- if (avifRGBFormatHasAlpha(rgb->format)) {
+ if (avifRGBFormatHasAlpha(rgb->format) && !rgb->ignoreAlpha) {
avifAlphaParams params;
params.width = rgb->width;
@@ -840,49 +952,57 @@ avifResult avifImageYUVToRGB(avifImage * image, avifRGBImage * rgb)
}
}
- if (state.mode == AVIF_REFORMAT_MODE_IDENTITY) {
- if ((image->depth == 8) && (rgb->depth == 8) && image->yuvRowBytes[AVIF_CHAN_U] && image->yuvRowBytes[AVIF_CHAN_V] &&
- (image->yuvRange == AVIF_RANGE_FULL)) {
- return avifImageIdentity8ToRGB8ColorFullRange(image, rgb, &state);
- }
-
- // TODO: Add more fast paths for identity
- } else {
- if (image->depth > 8) {
- // yuv:u16
+ const avifBool hasColor =
+ (image->yuvRowBytes[AVIF_CHAN_U] && image->yuvRowBytes[AVIF_CHAN_V] && (image->yuvFormat != AVIF_PIXEL_FORMAT_YUV400));
- if (rgb->depth > 8) {
- // yuv:u16, rgb:u16
-
- if (image->yuvRowBytes[AVIF_CHAN_U] && image->yuvRowBytes[AVIF_CHAN_V]) {
- return avifImageYUV16ToRGB16Color(image, rgb, &state);
- }
- return avifImageYUV16ToRGB16Mono(image, rgb, &state);
- } else {
- // yuv:u16, rgb:u8
+ if (!hasColor || (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) || (rgb->chromaUpsampling == AVIF_CHROMA_UPSAMPLING_NEAREST)) {
+ // None of these fast paths currently support bilinear upsampling, so avoid all of them
+ // unless the YUV data isn't subsampled or they explicitly requested AVIF_CHROMA_UPSAMPLING_NEAREST.
- if (image->yuvRowBytes[AVIF_CHAN_U] && image->yuvRowBytes[AVIF_CHAN_V]) {
- return avifImageYUV16ToRGB8Color(image, rgb, &state);
- }
- return avifImageYUV16ToRGB8Mono(image, rgb, &state);
+ if (state.mode == AVIF_REFORMAT_MODE_IDENTITY) {
+ if ((image->depth == 8) && (rgb->depth == 8) && (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) &&
+ (image->yuvRange == AVIF_RANGE_FULL)) {
+ return avifImageIdentity8ToRGB8ColorFullRange(image, rgb, &state);
}
+
+ // TODO: Add more fast paths for identity
} else {
- // yuv:u8
+ if (image->depth > 8) {
+ // yuv:u16
+
+ if (rgb->depth > 8) {
+ // yuv:u16, rgb:u16
- if (rgb->depth > 8) {
- // yuv:u8, rgb:u16
+ if (hasColor) {
+ return avifImageYUV16ToRGB16Color(image, rgb, &state);
+ }
+ return avifImageYUV16ToRGB16Mono(image, rgb, &state);
+ } else {
+ // yuv:u16, rgb:u8
- if (image->yuvRowBytes[AVIF_CHAN_U] && image->yuvRowBytes[AVIF_CHAN_V]) {
- return avifImageYUV8ToRGB16Color(image, rgb, &state);
+ if (hasColor) {
+ return avifImageYUV16ToRGB8Color(image, rgb, &state);
+ }
+ return avifImageYUV16ToRGB8Mono(image, rgb, &state);
}
- return avifImageYUV8ToRGB16Mono(image, rgb, &state);
} else {
- // yuv:u8, rgb:u8
+ // yuv:u8
+
+ if (rgb->depth > 8) {
+ // yuv:u8, rgb:u16
- if (image->yuvRowBytes[AVIF_CHAN_U] && image->yuvRowBytes[AVIF_CHAN_V]) {
- return avifImageYUV8ToRGB8Color(image, rgb, &state);
+ if (hasColor) {
+ return avifImageYUV8ToRGB16Color(image, rgb, &state);
+ }
+ return avifImageYUV8ToRGB16Mono(image, rgb, &state);
+ } else {
+ // yuv:u8, rgb:u8
+
+ if (hasColor) {
+ return avifImageYUV8ToRGB8Color(image, rgb, &state);
+ }
+ return avifImageYUV8ToRGB8Mono(image, rgb, &state);
}
- return avifImageYUV8ToRGB8Mono(image, rgb, &state);
}
}
}
diff --git a/chromium/third_party/libavif/src/src/stream.c b/chromium/third_party/libavif/src/src/stream.c
index 0acafe9bae9..32700610e54 100644
--- a/chromium/third_party/libavif/src/src/stream.c
+++ b/chromium/third_party/libavif/src/src/stream.c
@@ -3,6 +3,7 @@
#include "avif/internal.h"
+#include <stdint.h>
#include <string.h>
// ---------------------------------------------------------------------------
@@ -19,17 +20,17 @@ void avifROStreamStart(avifROStream * stream, avifROData * raw)
stream->offset = 0;
}
-avifBool avifROStreamHasBytesLeft(avifROStream * stream, size_t byteCount)
+avifBool avifROStreamHasBytesLeft(const avifROStream * stream, size_t byteCount)
{
return (stream->offset + byteCount) <= stream->raw->size;
}
-size_t avifROStreamRemainingBytes(avifROStream * stream)
+size_t avifROStreamRemainingBytes(const avifROStream * stream)
{
return stream->raw->size - stream->offset;
}
-size_t avifROStreamOffset(avifROStream * stream)
+size_t avifROStreamOffset(const avifROStream * stream)
{
return stream->offset;
}
@@ -159,7 +160,11 @@ avifBool avifROStreamReadBoxHeader(avifROStream * stream, avifBoxHeader * header
CHECK(avifROStreamSkip(stream, 16));
}
- header->size = (size_t)(size - (stream->offset - startOffset));
+ size_t bytesRead = stream->offset - startOffset;
+ if ((size < bytesRead) || ((size - bytesRead) > SIZE_MAX)) {
+ return AVIF_FALSE;
+ }
+ header->size = (size_t)(size - bytesRead);
// Make the assumption here that this box's contents must fit in the remaining portion of the parent stream
if (header->size > avifROStreamRemainingBytes(stream)) {
@@ -168,7 +173,7 @@ avifBool avifROStreamReadBoxHeader(avifROStream * stream, avifBoxHeader * header
return AVIF_TRUE;
}
-avifBool avifROStreamReadVersionAndFlags(avifROStream * stream, uint8_t * version, uint8_t * flags)
+avifBool avifROStreamReadVersionAndFlags(avifROStream * stream, uint8_t * version, uint32_t * flags)
{
uint8_t versionAndFlags[4];
CHECK(avifROStreamRead(stream, versionAndFlags, 4));
@@ -176,7 +181,7 @@ avifBool avifROStreamReadVersionAndFlags(avifROStream * stream, uint8_t * versio
*version = versionAndFlags[0];
}
if (flags) {
- memcpy(flags, &versionAndFlags[1], 3);
+ *flags = (versionAndFlags[1] << 16) + (versionAndFlags[2] << 8) + (versionAndFlags[3] << 0);
}
return AVIF_TRUE;
}
@@ -210,7 +215,7 @@ void avifRWStreamStart(avifRWStream * stream, avifRWData * raw)
stream->offset = 0;
}
-size_t avifRWStreamOffset(avifRWStream * stream)
+size_t avifRWStreamOffset(const avifRWStream * stream)
{
return stream->offset;
}
@@ -234,7 +239,7 @@ void avifRWStreamFinishWrite(avifRWStream * stream)
}
}
-void avifRWStreamWrite(avifRWStream * stream, const uint8_t * data, size_t size)
+void avifRWStreamWrite(avifRWStream * stream, const void * data, size_t size)
{
if (!size) {
return;
@@ -247,10 +252,10 @@ void avifRWStreamWrite(avifRWStream * stream, const uint8_t * data, size_t size)
void avifRWStreamWriteChars(avifRWStream * stream, const char * chars, size_t size)
{
- avifRWStreamWrite(stream, (const uint8_t *)chars, size);
+ avifRWStreamWrite(stream, chars, size);
}
-avifBoxMarker avifRWStreamWriteBox(avifRWStream * stream, const char * type, int version, size_t contentSize)
+avifBoxMarker avifRWStreamWriteFullBox(avifRWStream * stream, const char * type, size_t contentSize, int version, uint32_t flags)
{
avifBoxMarker marker = stream->offset;
size_t headerSize = sizeof(uint32_t) + 4 /* size of type */;
@@ -260,17 +265,25 @@ avifBoxMarker avifRWStreamWriteBox(avifRWStream * stream, const char * type, int
makeRoom(stream, headerSize);
memset(stream->raw->data + stream->offset, 0, headerSize);
+ uint32_t noSize = avifHTONL((uint32_t)(headerSize + contentSize));
+ memcpy(stream->raw->data + stream->offset, &noSize, sizeof(uint32_t));
+ memcpy(stream->raw->data + stream->offset + 4, type, 4);
if (version != -1) {
stream->raw->data[stream->offset + 8] = (uint8_t)version;
+ stream->raw->data[stream->offset + 9] = (uint8_t)((flags >> 16) & 0xff);
+ stream->raw->data[stream->offset + 10] = (uint8_t)((flags >> 8) & 0xff);
+ stream->raw->data[stream->offset + 11] = (uint8_t)((flags >> 0) & 0xff);
}
- uint32_t noSize = avifNTOHL((uint32_t)(headerSize + contentSize));
- memcpy(stream->raw->data + stream->offset, &noSize, sizeof(uint32_t));
- memcpy(stream->raw->data + stream->offset + 4, type, 4);
stream->offset += headerSize;
return marker;
}
+avifBoxMarker avifRWStreamWriteBox(avifRWStream * stream, const char * type, size_t contentSize)
+{
+ return avifRWStreamWriteFullBox(stream, type, contentSize, -1, 0);
+}
+
void avifRWStreamFinishBox(avifRWStream * stream, avifBoxMarker marker)
{
uint32_t noSize = avifNTOHL((uint32_t)(stream->offset - marker));
@@ -303,6 +316,15 @@ void avifRWStreamWriteU32(avifRWStream * stream, uint32_t v)
stream->offset += size;
}
+void avifRWStreamWriteU64(avifRWStream * stream, uint64_t v)
+{
+ size_t size = sizeof(uint64_t);
+ v = avifHTON64(v);
+ makeRoom(stream, size);
+ memcpy(stream->raw->data + stream->offset, &v, size);
+ stream->offset += size;
+}
+
void avifRWStreamWriteZeros(avifRWStream * stream, size_t byteCount)
{
makeRoom(stream, byteCount);
diff --git a/chromium/third_party/libavif/src/src/write.c b/chromium/third_party/libavif/src/src/write.c
index 37f9a9d5118..d3ac127ea7f 100644
--- a/chromium/third_party/libavif/src/src/write.c
+++ b/chromium/third_party/libavif/src/src/write.c
@@ -4,6 +4,7 @@
#include "avif/internal.h"
#include <string.h>
+#include <time.h>
#define MAX_ASSOCIATIONS 16
struct ipmaArray
@@ -19,17 +20,53 @@ static void ipmaPush(struct ipmaArray * ipma, uint8_t assoc, avifBool essential)
++ipma->count;
}
+// Used to store offsets in meta boxes which need to point at mdat offsets that
+// aren't known yet. When an item's mdat payload is written, all registered fixups
+// will have this now-known offset "fixed up".
+typedef struct avifOffsetFixup
+{
+ size_t offset;
+} avifOffsetFixup;
+AVIF_ARRAY_DECLARE(avifOffsetFixupArray, avifOffsetFixup, fixup);
+
static const char alphaURN[] = URN_ALPHA0;
static const size_t alphaURNSize = sizeof(alphaURN);
static const char xmpContentType[] = CONTENT_TYPE_XMP;
static const size_t xmpContentTypeSize = sizeof(xmpContentType);
-static avifBool avifImageIsOpaque(avifImage * image);
-static void fillConfigBox(avifCodec * codec, avifImage * image, avifBool alpha);
+static avifBool avifImageIsOpaque(const avifImage * image);
+static void fillConfigBox(avifCodec * codec, const avifImage * image, avifBool alpha);
static void writeConfigBox(avifRWStream * s, avifCodecConfigurationBox * cfg);
// ---------------------------------------------------------------------------
+// avifCodecEncodeOutput
+
+avifCodecEncodeOutput * avifCodecEncodeOutputCreate(void)
+{
+ avifCodecEncodeOutput * encodeOutput = (avifCodecEncodeOutput *)avifAlloc(sizeof(avifCodecEncodeOutput));
+ memset(encodeOutput, 0, sizeof(avifCodecEncodeOutput));
+ avifArrayCreate(&encodeOutput->samples, sizeof(avifEncodeSample), 1);
+ return encodeOutput;
+}
+
+void avifCodecEncodeOutputAddSample(avifCodecEncodeOutput * encodeOutput, const uint8_t * data, size_t len, avifBool sync)
+{
+ avifEncodeSample * sample = (avifEncodeSample *)avifArrayPushPtr(&encodeOutput->samples);
+ avifRWDataSet(&sample->data, data, len);
+ sample->sync = sync;
+}
+
+void avifCodecEncodeOutputDestroy(avifCodecEncodeOutput * encodeOutput)
+{
+ for (uint32_t sampleIndex = 0; sampleIndex < encodeOutput->samples.count; ++sampleIndex) {
+ avifRWDataFree(&encodeOutput->samples.sample[sampleIndex].data);
+ }
+ avifArrayDestroy(&encodeOutput->samples);
+ avifFree(encodeOutput);
+}
+
+// ---------------------------------------------------------------------------
// avifEncoderItem
// one "item" worth for encoder
@@ -37,16 +74,16 @@ typedef struct avifEncoderItem
{
uint16_t id;
uint8_t type[4];
- avifImage * image; // avifImage* to use when encoding or populating ipma for this item (unowned)
- avifCodec * codec; // only present on type==av01
- avifRWData content; // OBU data on av01, metadata payload for Exif/XMP
+ avifCodec * codec; // only present on type==av01
+ avifCodecEncodeOutput * encodeOutput; // AV1 sample data
+ avifRWData metadataPayload; // Exif/XMP data
avifBool alpha;
const char * infeName;
size_t infeNameSize;
const char * infeContentType;
size_t infeContentTypeSize;
- size_t infeOffsetOffset; // Stream offset where infe offset was written, so it can be properly set after mdat is written
+ avifOffsetFixupArray mdatFixups;
uint16_t irefToID; // if non-zero, make an iref from this id -> irefToID
const char * irefType;
@@ -56,11 +93,24 @@ typedef struct avifEncoderItem
AVIF_ARRAY_DECLARE(avifEncoderItemArray, avifEncoderItem, item);
// ---------------------------------------------------------------------------
+// avifEncoderFrame
+
+typedef struct avifEncoderFrame
+{
+ uint64_t durationInTimescales;
+} avifEncoderFrame;
+AVIF_ARRAY_DECLARE(avifEncoderFrameArray, avifEncoderFrame, frame);
+
+// ---------------------------------------------------------------------------
// avifEncoderData
typedef struct avifEncoderData
{
avifEncoderItemArray items;
+ avifEncoderFrameArray frames;
+ avifImage * imageMetadata;
+ avifEncoderItem * colorItem;
+ avifEncoderItem * alphaItem;
uint16_t lastItemID;
uint16_t primaryItemID;
} avifEncoderData;
@@ -69,7 +119,9 @@ static avifEncoderData * avifEncoderDataCreate()
{
avifEncoderData * data = (avifEncoderData *)avifAlloc(sizeof(avifEncoderData));
memset(data, 0, sizeof(avifEncoderData));
+ data->imageMetadata = avifImageCreateEmpty();
avifArrayCreate(&data->items, sizeof(avifEncoderItem), 8);
+ avifArrayCreate(&data->frames, sizeof(avifEncoderFrame), 1);
return data;
}
@@ -81,6 +133,8 @@ static avifEncoderItem * avifEncoderDataCreateItem(avifEncoderData * data, const
memcpy(item->type, type, sizeof(item->type));
item->infeName = infeName;
item->infeNameSize = infeNameSize;
+ item->encodeOutput = avifCodecEncodeOutputCreate();
+ avifArrayCreate(&item->mdatFixups, sizeof(avifOffsetFixup), 4);
return item;
}
@@ -91,12 +145,22 @@ static void avifEncoderDataDestroy(avifEncoderData * data)
if (item->codec) {
avifCodecDestroy(item->codec);
}
- avifRWDataFree(&item->content);
+ avifCodecEncodeOutputDestroy(item->encodeOutput);
+ avifRWDataFree(&item->metadataPayload);
+ avifArrayDestroy(&item->mdatFixups);
}
+ avifImageDestroy(data->imageMetadata);
avifArrayDestroy(&data->items);
+ avifArrayDestroy(&data->frames);
avifFree(data);
}
+static void avifEncoderItemAddMdatFixup(avifEncoderItem * item, const avifRWStream * s)
+{
+ avifOffsetFixup * fixup = (avifOffsetFixup *)avifArrayPushPtr(&item->mdatFixups);
+ fixup->offset = avifRWStreamOffset(s);
+}
+
// ---------------------------------------------------------------------------
avifEncoder * avifEncoderCreate(void)
@@ -112,6 +176,8 @@ avifEncoder * avifEncoderCreate(void)
encoder->tileColsLog2 = 0;
encoder->speed = AVIF_SPEED_DEFAULT;
encoder->data = avifEncoderDataCreate();
+ encoder->timescale = 1;
+ encoder->keyframeInterval = 0;
return encoder;
}
@@ -121,43 +187,214 @@ void avifEncoderDestroy(avifEncoder * encoder)
avifFree(encoder);
}
-avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData * output)
+static void avifEncoderWriteColorProperties(avifRWStream * s, const avifImage * imageMetadata, struct ipmaArray * ipma, uint8_t * itemPropertyIndex)
+{
+ if (imageMetadata->icc.size > 0) {
+ avifBoxMarker colr = avifRWStreamWriteBox(s, "colr", AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteChars(s, "prof", 4); // unsigned int(32) colour_type;
+ avifRWStreamWrite(s, imageMetadata->icc.data, imageMetadata->icc.size);
+ avifRWStreamFinishBox(s, colr);
+ if (ipma && itemPropertyIndex) {
+ ipmaPush(ipma, ++(*itemPropertyIndex), AVIF_FALSE);
+ }
+ } else {
+ avifBoxMarker colr = avifRWStreamWriteBox(s, "colr", AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteChars(s, "nclx", 4); // unsigned int(32) colour_type;
+ avifRWStreamWriteU16(s, (uint16_t)imageMetadata->colorPrimaries); // unsigned int(16) colour_primaries;
+ avifRWStreamWriteU16(s, (uint16_t)imageMetadata->transferCharacteristics); // unsigned int(16) transfer_characteristics;
+ avifRWStreamWriteU16(s, (uint16_t)imageMetadata->matrixCoefficients); // unsigned int(16) matrix_coefficients;
+ avifRWStreamWriteU8(s, (imageMetadata->yuvRange == AVIF_RANGE_FULL) ? 0x80 : 0); // unsigned int(1) full_range_flag;
+ // unsigned int(7) reserved = 0;
+ avifRWStreamFinishBox(s, colr);
+ if (ipma && itemPropertyIndex) {
+ ipmaPush(ipma, ++(*itemPropertyIndex), AVIF_FALSE);
+ }
+ }
+
+ // Write (Optional) Transformations
+ if (imageMetadata->transformFlags & AVIF_TRANSFORM_PASP) {
+ avifBoxMarker pasp = avifRWStreamWriteBox(s, "pasp", AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteU32(s, imageMetadata->pasp.hSpacing); // unsigned int(32) hSpacing;
+ avifRWStreamWriteU32(s, imageMetadata->pasp.vSpacing); // unsigned int(32) vSpacing;
+ avifRWStreamFinishBox(s, pasp);
+ if (ipma && itemPropertyIndex) {
+ ipmaPush(ipma, ++(*itemPropertyIndex), AVIF_FALSE);
+ }
+ }
+ if (imageMetadata->transformFlags & AVIF_TRANSFORM_CLAP) {
+ avifBoxMarker clap = avifRWStreamWriteBox(s, "clap", AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteU32(s, imageMetadata->clap.widthN); // unsigned int(32) cleanApertureWidthN;
+ avifRWStreamWriteU32(s, imageMetadata->clap.widthD); // unsigned int(32) cleanApertureWidthD;
+ avifRWStreamWriteU32(s, imageMetadata->clap.heightN); // unsigned int(32) cleanApertureHeightN;
+ avifRWStreamWriteU32(s, imageMetadata->clap.heightD); // unsigned int(32) cleanApertureHeightD;
+ avifRWStreamWriteU32(s, imageMetadata->clap.horizOffN); // unsigned int(32) horizOffN;
+ avifRWStreamWriteU32(s, imageMetadata->clap.horizOffD); // unsigned int(32) horizOffD;
+ avifRWStreamWriteU32(s, imageMetadata->clap.vertOffN); // unsigned int(32) vertOffN;
+ avifRWStreamWriteU32(s, imageMetadata->clap.vertOffD); // unsigned int(32) vertOffD;
+ avifRWStreamFinishBox(s, clap);
+ if (ipma && itemPropertyIndex) {
+ ipmaPush(ipma, ++(*itemPropertyIndex), AVIF_TRUE);
+ }
+ }
+ if (imageMetadata->transformFlags & AVIF_TRANSFORM_IROT) {
+ avifBoxMarker irot = avifRWStreamWriteBox(s, "irot", AVIF_BOX_SIZE_TBD);
+ uint8_t angle = imageMetadata->irot.angle & 0x3;
+ avifRWStreamWrite(s, &angle, 1); // unsigned int (6) reserved = 0; unsigned int (2) angle;
+ avifRWStreamFinishBox(s, irot);
+ if (ipma && itemPropertyIndex) {
+ ipmaPush(ipma, ++(*itemPropertyIndex), AVIF_TRUE);
+ }
+ }
+ if (imageMetadata->transformFlags & AVIF_TRANSFORM_IMIR) {
+ avifBoxMarker imir = avifRWStreamWriteBox(s, "imir", AVIF_BOX_SIZE_TBD);
+ uint8_t axis = imageMetadata->imir.axis & 0x1;
+ avifRWStreamWrite(s, &axis, 1); // unsigned int (7) reserved = 0; unsigned int (1) axis;
+ avifRWStreamFinishBox(s, imir);
+ if (ipma && itemPropertyIndex) {
+ ipmaPush(ipma, ++(*itemPropertyIndex), AVIF_TRUE);
+ }
+ }
+}
+
+// Write unassociated metadata items (EXIF, XMP) to a small meta box inside a trak box.
+// These items are implicitly associated with the track they are contained within.
+static void avifEncoderWriteTrackMetaBox(avifEncoder * encoder, avifRWStream * s)
+{
+ // Count how many non-av01 items (such as EXIF/XMP) are being written
+ uint32_t metadataItemCount = 0;
+ for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
+ avifEncoderItem * item = &encoder->data->items.item[itemIndex];
+ if (memcmp(item->type, "av01", 4) != 0) {
+ ++metadataItemCount;
+ }
+ }
+ if (metadataItemCount == 0) {
+ // Don't even bother writing the trak meta box
+ return;
+ }
+
+ avifBoxMarker meta = avifRWStreamWriteFullBox(s, "meta", AVIF_BOX_SIZE_TBD, 0, 0);
+
+ avifBoxMarker hdlr = avifRWStreamWriteFullBox(s, "hdlr", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(s, 0); // unsigned int(32) pre_defined = 0;
+ avifRWStreamWriteChars(s, "pict", 4); // unsigned int(32) handler_type;
+ avifRWStreamWriteZeros(s, 12); // const unsigned int(32)[3] reserved = 0;
+ avifRWStreamWriteChars(s, "libavif", 8); // string name; (writing null terminator)
+ avifRWStreamFinishBox(s, hdlr);
+
+ avifBoxMarker iloc = avifRWStreamWriteFullBox(s, "iloc", AVIF_BOX_SIZE_TBD, 0, 0);
+ uint8_t offsetSizeAndLengthSize = (4 << 4) + (4 << 0); // unsigned int(4) offset_size;
+ // unsigned int(4) length_size;
+ avifRWStreamWrite(s, &offsetSizeAndLengthSize, 1); //
+ avifRWStreamWriteZeros(s, 1); // unsigned int(4) base_offset_size;
+ // unsigned int(4) reserved;
+ avifRWStreamWriteU16(s, (uint16_t)metadataItemCount); // unsigned int(16) item_count;
+ for (uint32_t trakItemIndex = 0; trakItemIndex < encoder->data->items.count; ++trakItemIndex) {
+ avifEncoderItem * item = &encoder->data->items.item[trakItemIndex];
+ if (memcmp(item->type, "av01", 4) == 0) {
+ // Skip over all non-metadata items
+ continue;
+ }
+
+ avifRWStreamWriteU16(s, item->id); // unsigned int(16) item_ID;
+ avifRWStreamWriteU16(s, 0); // unsigned int(16) data_reference_index;
+ avifRWStreamWriteU16(s, 1); // unsigned int(16) extent_count;
+ avifEncoderItemAddMdatFixup(item, s); //
+ avifRWStreamWriteU32(s, 0 /* set later */); // unsigned int(offset_size*8) extent_offset;
+ avifRWStreamWriteU32(s, (uint32_t)item->metadataPayload.size); // unsigned int(length_size*8) extent_length;
+ }
+ avifRWStreamFinishBox(s, iloc);
+
+ avifBoxMarker iinf = avifRWStreamWriteFullBox(s, "iinf", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU16(s, (uint16_t)metadataItemCount); // unsigned int(16) entry_count;
+ for (uint32_t trakItemIndex = 0; trakItemIndex < encoder->data->items.count; ++trakItemIndex) {
+ avifEncoderItem * item = &encoder->data->items.item[trakItemIndex];
+ if (memcmp(item->type, "av01", 4) == 0) {
+ continue;
+ }
+
+ avifBoxMarker infe = avifRWStreamWriteFullBox(s, "infe", AVIF_BOX_SIZE_TBD, 2, 0);
+ avifRWStreamWriteU16(s, item->id); // unsigned int(16) item_ID;
+ avifRWStreamWriteU16(s, 0); // unsigned int(16) item_protection_index;
+ avifRWStreamWrite(s, item->type, 4); // unsigned int(32) item_type;
+ avifRWStreamWriteChars(s, item->infeName, item->infeNameSize); // string item_name; (writing null terminator)
+ if (item->infeContentType && item->infeContentTypeSize) { // string content_type; (writing null terminator)
+ avifRWStreamWriteChars(s, item->infeContentType, item->infeContentTypeSize);
+ }
+ avifRWStreamFinishBox(s, infe);
+ }
+ avifRWStreamFinishBox(s, iinf);
+
+ avifRWStreamFinishBox(s, meta);
+}
+
+avifResult avifEncoderAddImage(avifEncoder * encoder, const avifImage * image, uint64_t durationInTimescales, uint32_t addImageFlags)
{
+ // -----------------------------------------------------------------------
+ // Validate image
+
if ((image->depth != 8) && (image->depth != 10) && (image->depth != 12)) {
return AVIF_RESULT_UNSUPPORTED_DEPTH;
}
- avifResult result = AVIF_RESULT_UNKNOWN_ERROR;
+ if (!image->width || !image->height || !image->yuvPlanes[AVIF_CHAN_Y]) {
+ return AVIF_RESULT_NO_CONTENT;
+ }
+
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_NONE) {
+ return AVIF_RESULT_NO_YUV_FORMAT_SELECTED;
+ }
+
+ // -----------------------------------------------------------------------
- avifEncoderItem * colorItem = avifEncoderDataCreateItem(encoder->data, "av01", "Color", 6);
- colorItem->image = image;
- colorItem->codec = avifCodecCreate(encoder->codecChoice, AVIF_CODEC_FLAG_CAN_ENCODE);
- if (!colorItem->codec) {
- // Just bail out early, we're not surviving this function without an encoder compiled in
- return AVIF_RESULT_NO_CODEC_AVAILABLE;
+ if (durationInTimescales == 0) {
+ durationInTimescales = 1;
}
- encoder->data->primaryItemID = colorItem->id;
- avifBool imageIsOpaque = avifImageIsOpaque(image);
- if (!imageIsOpaque) {
- avifEncoderItem * alphaItem = avifEncoderDataCreateItem(encoder->data, "av01", "Alpha", 6);
- alphaItem->image = image;
- alphaItem->codec = avifCodecCreate(encoder->codecChoice, AVIF_CODEC_FLAG_CAN_ENCODE);
- if (!alphaItem->codec) {
+ if (encoder->data->items.count == 0) {
+ // Make a copy of the first image's metadata (sans pixels) for future writing/validation
+ avifImageCopy(encoder->data->imageMetadata, image, 0);
+
+ // Prepare all AV1 items
+
+ encoder->data->colorItem = avifEncoderDataCreateItem(encoder->data, "av01", "Color", 6);
+ encoder->data->colorItem->codec = avifCodecCreate(encoder->codecChoice, AVIF_CODEC_FLAG_CAN_ENCODE);
+ if (!encoder->data->colorItem->codec) {
+ // Just bail out early, we're not surviving this function without an encoder compiled in
return AVIF_RESULT_NO_CODEC_AVAILABLE;
}
- alphaItem->alpha = AVIF_TRUE;
- alphaItem->irefToID = encoder->data->primaryItemID;
- alphaItem->irefType = "auxl";
- }
+ encoder->data->primaryItemID = encoder->data->colorItem->id;
+
+ avifBool needsAlpha = (image->alphaPlane != NULL);
+ if (addImageFlags & AVIF_ADD_IMAGE_FLAG_SINGLE) {
+ // If encoding a single image in which the alpha plane exists but is entirely opaque,
+        // simply skip writing an alpha AV1 payload entirely, as it'll be interpreted as opaque
+        // and uses fewer bytes.
+ //
+ // However, if encoding an image sequence, the first frame's alpha plane being entirely
+ // opaque could be a false positive for removing the alpha AV1 payload, as it might simply
+ // be a fade out later in the sequence. This is why avifImageIsOpaque() is only called
+ // when encoding a single image.
+
+ needsAlpha = !avifImageIsOpaque(image);
+ }
+ if (needsAlpha) {
+ encoder->data->alphaItem = avifEncoderDataCreateItem(encoder->data, "av01", "Alpha", 6);
+ encoder->data->alphaItem->codec = avifCodecCreate(encoder->codecChoice, AVIF_CODEC_FLAG_CAN_ENCODE);
+ if (!encoder->data->alphaItem->codec) {
+ return AVIF_RESULT_NO_CODEC_AVAILABLE;
+ }
+ encoder->data->alphaItem->alpha = AVIF_TRUE;
+ encoder->data->alphaItem->irefToID = encoder->data->primaryItemID;
+ encoder->data->alphaItem->irefType = "auxl";
+ }
- // -----------------------------------------------------------------------
- // Create metadata items (Exif, XMP)
+ // -----------------------------------------------------------------------
+ // Create metadata items (Exif, XMP)
- if (image->exif.size > 0) {
- // Validate Exif payload (if any) and find TIFF header offset
- uint32_t exifTiffHeaderOffset = 0;
if (image->exif.size > 0) {
+ // Validate Exif payload (if any) and find TIFF header offset
+ uint32_t exifTiffHeaderOffset = 0;
if (image->exif.size < 4) {
// Can't even fit the TIFF header, something is wrong
return AVIF_RESULT_INVALID_EXIF_PAYLOAD;
@@ -178,91 +415,134 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
// Couldn't find the TIFF header
return AVIF_RESULT_INVALID_EXIF_PAYLOAD;
}
+
+ avifEncoderItem * exifItem = avifEncoderDataCreateItem(encoder->data, "Exif", "Exif", 5);
+ exifItem->irefToID = encoder->data->primaryItemID;
+ exifItem->irefType = "cdsc";
+
+ avifRWDataRealloc(&exifItem->metadataPayload, sizeof(uint32_t) + image->exif.size);
+ exifTiffHeaderOffset = avifHTONL(exifTiffHeaderOffset);
+ memcpy(exifItem->metadataPayload.data, &exifTiffHeaderOffset, sizeof(uint32_t));
+ memcpy(exifItem->metadataPayload.data + sizeof(uint32_t), image->exif.data, image->exif.size);
}
- avifEncoderItem * exifItem = avifEncoderDataCreateItem(encoder->data, "Exif", "Exif", 5);
- exifItem->irefToID = encoder->data->primaryItemID;
- exifItem->irefType = "cdsc";
+ if (image->xmp.size > 0) {
+ avifEncoderItem * xmpItem = avifEncoderDataCreateItem(encoder->data, "mime", "XMP", 4);
+ xmpItem->irefToID = encoder->data->primaryItemID;
+ xmpItem->irefType = "cdsc";
- avifRWDataRealloc(&exifItem->content, sizeof(uint32_t) + image->exif.size);
- exifTiffHeaderOffset = avifHTONL(exifTiffHeaderOffset);
- memcpy(exifItem->content.data, &exifTiffHeaderOffset, sizeof(uint32_t));
- memcpy(exifItem->content.data + sizeof(uint32_t), image->exif.data, image->exif.size);
- }
+ xmpItem->infeContentType = xmpContentType;
+ xmpItem->infeContentTypeSize = xmpContentTypeSize;
+ avifRWDataSet(&xmpItem->metadataPayload, image->xmp.data, image->xmp.size);
+ }
- if (image->xmp.size > 0) {
- avifEncoderItem * xmpItem = avifEncoderDataCreateItem(encoder->data, "mime", "XMP", 4);
- xmpItem->irefToID = encoder->data->primaryItemID;
- xmpItem->irefType = "cdsc";
+ // -----------------------------------------------------------------------
+ // Pre-fill config boxes based on image (codec can query/update later)
- xmpItem->infeContentType = xmpContentType;
- xmpItem->infeContentTypeSize = xmpContentTypeSize;
- avifRWDataSet(&xmpItem->content, image->xmp.data, image->xmp.size);
+ for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
+ avifEncoderItem * item = &encoder->data->items.item[itemIndex];
+ if (item->codec) {
+ fillConfigBox(item->codec, image, item->alpha);
+ }
+ }
+ } else {
+ // Another frame in an image sequence
+
+ if (encoder->data->alphaItem && !image->alphaPlane) {
+            // If the first image in the sequence had an alpha plane (even if fully opaque), all
+            // subsequent images must have alpha as well.
+ return AVIF_RESULT_ENCODE_ALPHA_FAILED;
+ }
}
// -----------------------------------------------------------------------
- // Pre-fill config boxes based on image (codec can query/update later)
+ // Encode AV1 OBUs
+
+ if (encoder->keyframeInterval && ((encoder->data->frames.count % encoder->keyframeInterval) == 0)) {
+ addImageFlags |= AVIF_ADD_IMAGE_FLAG_FORCE_KEYFRAME;
+ }
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
- if (item->codec && item->image) {
- fillConfigBox(item->codec, item->image, item->alpha);
+ if (item->codec) {
+ if (!item->codec->encodeImage(item->codec, encoder, image, item->alpha, addImageFlags, item->encodeOutput)) {
+ return item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
+ }
}
}
- // -----------------------------------------------------------------------
- // Begin write stream
-
- avifRWStream s;
- avifRWStreamStart(&s, output);
-
- // -----------------------------------------------------------------------
- // Validate image
-
- if (!image->width || !image->height || !image->yuvPlanes[AVIF_CHAN_Y]) {
- result = AVIF_RESULT_NO_CONTENT;
- goto writeCleanup;
- }
+ avifEncoderFrame * frame = (avifEncoderFrame *)avifArrayPushPtr(&encoder->data->frames);
+ frame->durationInTimescales = durationInTimescales;
+ return AVIF_RESULT_OK;
+}
- if (image->yuvFormat == AVIF_PIXEL_FORMAT_NONE) {
- result = AVIF_RESULT_NO_YUV_FORMAT_SELECTED;
- goto writeCleanup;
+avifResult avifEncoderFinish(avifEncoder * encoder, avifRWData * output)
+{
+ if (encoder->data->items.count == 0) {
+ return AVIF_RESULT_NO_CONTENT;
}
// -----------------------------------------------------------------------
- // Encode AV1 OBUs
+ // Finish up AV1 encoding
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
- if (item->codec && item->image) {
- if (!item->codec->encodeImage(item->codec, item->image, encoder, &item->content, item->alpha)) {
- result = item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
- goto writeCleanup;
+ if (item->codec) {
+ if (!item->codec->encodeFinish(item->codec, item->encodeOutput)) {
+ return item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
+ }
+
+ if (item->encodeOutput->samples.count != encoder->data->frames.count) {
+ return item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
}
- // TODO: rethink this if/when image grid encoding support is added
+ size_t obuSize = 0;
+ for (uint32_t sampleIndex = 0; sampleIndex < item->encodeOutput->samples.count; ++sampleIndex) {
+ obuSize += item->encodeOutput->samples.sample[sampleIndex].data.size;
+ }
if (item->alpha) {
- encoder->ioStats.alphaOBUSize = item->content.size;
+ encoder->ioStats.alphaOBUSize = obuSize;
} else {
- encoder->ioStats.colorOBUSize = item->content.size;
+ encoder->ioStats.colorOBUSize = obuSize;
}
}
}
// -----------------------------------------------------------------------
+ // Begin write stream
+
+ const avifImage * imageMetadata = encoder->data->imageMetadata;
+ // The epoch for creation_time and modification_time is midnight, Jan. 1,
+ // 1904, in UTC time. Add the number of seconds between that epoch and the
+ // Unix epoch.
+ uint64_t now = (uint64_t)time(NULL) + 2082844800;
+
+ avifRWStream s;
+ avifRWStreamStart(&s, output);
+
+ // -----------------------------------------------------------------------
// Write ftyp
- avifBoxMarker ftyp = avifRWStreamWriteBox(&s, "ftyp", -1, 0);
- avifRWStreamWriteChars(&s, "avif", 4); // unsigned int(32) major_brand;
- avifRWStreamWriteU32(&s, 0); // unsigned int(32) minor_version;
- avifRWStreamWriteChars(&s, "avif", 4); // unsigned int(32) compatible_brands[];
- avifRWStreamWriteChars(&s, "mif1", 4); // ... compatible_brands[]
- avifRWStreamWriteChars(&s, "miaf", 4); // ... compatible_brands[]
- if ((image->depth == 8) || (image->depth == 10)) { //
- if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420) { //
- avifRWStreamWriteChars(&s, "MA1B", 4); // ... compatible_brands[]
- } else if (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) { //
- avifRWStreamWriteChars(&s, "MA1A", 4); // ... compatible_brands[]
+ const char * majorBrand = "avif";
+ if (encoder->data->frames.count > 1) {
+ majorBrand = "avis";
+ }
+
+ avifBoxMarker ftyp = avifRWStreamWriteBox(&s, "ftyp", AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteChars(&s, majorBrand, 4); // unsigned int(32) major_brand;
+ avifRWStreamWriteU32(&s, 0); // unsigned int(32) minor_version;
+ avifRWStreamWriteChars(&s, "avif", 4); // unsigned int(32) compatible_brands[];
+ if (encoder->data->frames.count > 1) { //
+ avifRWStreamWriteChars(&s, "avis", 4); // ... compatible_brands[]
+ avifRWStreamWriteChars(&s, "msf1", 4); // ... compatible_brands[]
+ } //
+ avifRWStreamWriteChars(&s, "mif1", 4); // ... compatible_brands[]
+ avifRWStreamWriteChars(&s, "miaf", 4); // ... compatible_brands[]
+ if ((imageMetadata->depth == 8) || (imageMetadata->depth == 10)) { //
+ if (imageMetadata->yuvFormat == AVIF_PIXEL_FORMAT_YUV420) { //
+ avifRWStreamWriteChars(&s, "MA1B", 4); // ... compatible_brands[]
+ } else if (imageMetadata->yuvFormat == AVIF_PIXEL_FORMAT_YUV444) { //
+ avifRWStreamWriteChars(&s, "MA1A", 4); // ... compatible_brands[]
}
}
avifRWStreamFinishBox(&s, ftyp);
@@ -270,12 +550,12 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
// -----------------------------------------------------------------------
// Start meta
- avifBoxMarker meta = avifRWStreamWriteBox(&s, "meta", 0, 0);
+ avifBoxMarker meta = avifRWStreamWriteFullBox(&s, "meta", AVIF_BOX_SIZE_TBD, 0, 0);
// -----------------------------------------------------------------------
// Write hdlr
- avifBoxMarker hdlr = avifRWStreamWriteBox(&s, "hdlr", 0, 0);
+ avifBoxMarker hdlr = avifRWStreamWriteFullBox(&s, "hdlr", AVIF_BOX_SIZE_TBD, 0, 0);
avifRWStreamWriteU32(&s, 0); // unsigned int(32) pre_defined = 0;
avifRWStreamWriteChars(&s, "pict", 4); // unsigned int(32) handler_type;
avifRWStreamWriteZeros(&s, 12); // const unsigned int(32)[3] reserved = 0;
@@ -286,14 +566,14 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
// Write pitm
if (encoder->data->primaryItemID != 0) {
- avifRWStreamWriteBox(&s, "pitm", 0, sizeof(uint16_t));
+ avifRWStreamWriteFullBox(&s, "pitm", sizeof(uint16_t), 0, 0);
avifRWStreamWriteU16(&s, encoder->data->primaryItemID); // unsigned int(16) item_ID;
}
// -----------------------------------------------------------------------
// Write iloc
- avifBoxMarker iloc = avifRWStreamWriteBox(&s, "iloc", 0, 0);
+ avifBoxMarker iloc = avifRWStreamWriteFullBox(&s, "iloc", AVIF_BOX_SIZE_TBD, 0, 0);
uint8_t offsetSizeAndLengthSize = (4 << 4) + (4 << 0); // unsigned int(4) offset_size;
// unsigned int(4) length_size;
@@ -304,12 +584,26 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
- avifRWStreamWriteU16(&s, item->id); // unsigned int(16) item_ID;
- avifRWStreamWriteU16(&s, 0); // unsigned int(16) data_reference_index;
- avifRWStreamWriteU16(&s, 1); // unsigned int(16) extent_count;
- item->infeOffsetOffset = avifRWStreamOffset(&s); //
- avifRWStreamWriteU32(&s, 0 /* set later */); // unsigned int(offset_size*8) extent_offset;
- avifRWStreamWriteU32(&s, (uint32_t)item->content.size); // unsigned int(length_size*8) extent_length;
+
+ uint32_t contentSize = (uint32_t)item->metadataPayload.size;
+ if (item->encodeOutput->samples.count > 0) {
+ // This is choosing sample 0's size as there are two cases here:
+ // * This is a single image, in which case this is correct
+ // * This is an image sequence, but this file should still be a valid single-image avif,
+ // so there must still be a primary item pointing at a sync sample. Since the first
+ // frame of the image sequence is guaranteed to be a sync sample, it is chosen here.
+ //
+ // TODO: Offer the ability for a user to specify which frame in the sequence should
+ // become the primary item's image, and force that frame to be a keyframe.
+ contentSize = (uint32_t)item->encodeOutput->samples.sample[0].data.size;
+ }
+
+ avifRWStreamWriteU16(&s, item->id); // unsigned int(16) item_ID;
+ avifRWStreamWriteU16(&s, 0); // unsigned int(16) data_reference_index;
+ avifRWStreamWriteU16(&s, 1); // unsigned int(16) extent_count;
+ avifEncoderItemAddMdatFixup(item, &s); //
+ avifRWStreamWriteU32(&s, 0 /* set later */); // unsigned int(offset_size*8) extent_offset;
+ avifRWStreamWriteU32(&s, (uint32_t)contentSize); // unsigned int(length_size*8) extent_length;
}
avifRWStreamFinishBox(&s, iloc);
@@ -317,13 +611,13 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
// -----------------------------------------------------------------------
// Write iinf
- avifBoxMarker iinf = avifRWStreamWriteBox(&s, "iinf", 0, 0);
+ avifBoxMarker iinf = avifRWStreamWriteFullBox(&s, "iinf", AVIF_BOX_SIZE_TBD, 0, 0);
avifRWStreamWriteU16(&s, (uint16_t)encoder->data->items.count); // unsigned int(16) entry_count;
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
- avifBoxMarker infe = avifRWStreamWriteBox(&s, "infe", 2, 0);
+ avifBoxMarker infe = avifRWStreamWriteFullBox(&s, "infe", AVIF_BOX_SIZE_TBD, 2, 0);
avifRWStreamWriteU16(&s, item->id); // unsigned int(16) item_ID;
avifRWStreamWriteU16(&s, 0); // unsigned int(16) item_protection_index;
avifRWStreamWrite(&s, item->type, 4); // unsigned int(32) item_type;
@@ -339,47 +633,52 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
// -----------------------------------------------------------------------
// Write iref boxes
+ avifBoxMarker iref = 0;
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
if (item->irefToID != 0) {
- avifBoxMarker iref = avifRWStreamWriteBox(&s, "iref", 0, 0);
- avifBoxMarker refType = avifRWStreamWriteBox(&s, item->irefType, -1, 0);
+ if (!iref) {
+ iref = avifRWStreamWriteFullBox(&s, "iref", AVIF_BOX_SIZE_TBD, 0, 0);
+ }
+ avifBoxMarker refType = avifRWStreamWriteBox(&s, item->irefType, AVIF_BOX_SIZE_TBD);
avifRWStreamWriteU16(&s, item->id); // unsigned int(16) from_item_ID;
avifRWStreamWriteU16(&s, 1); // unsigned int(16) reference_count;
avifRWStreamWriteU16(&s, item->irefToID); // unsigned int(16) to_item_ID;
avifRWStreamFinishBox(&s, refType);
- avifRWStreamFinishBox(&s, iref);
}
}
+ if (iref) {
+ avifRWStreamFinishBox(&s, iref);
+ }
// -----------------------------------------------------------------------
// Write iprp -> ipco/ipma
- avifBoxMarker iprp = avifRWStreamWriteBox(&s, "iprp", -1, 0);
+ avifBoxMarker iprp = avifRWStreamWriteBox(&s, "iprp", AVIF_BOX_SIZE_TBD);
uint8_t itemPropertyIndex = 0;
- avifBoxMarker ipco = avifRWStreamWriteBox(&s, "ipco", -1, 0);
+ avifBoxMarker ipco = avifRWStreamWriteBox(&s, "ipco", AVIF_BOX_SIZE_TBD);
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
memset(&item->ipma, 0, sizeof(item->ipma));
- if (!item->image || !item->codec) {
+ if (!item->codec) {
// No ipma to write for this item
continue;
}
// Properties all av01 items need
- avifBoxMarker ispe = avifRWStreamWriteBox(&s, "ispe", 0, 0);
- avifRWStreamWriteU32(&s, item->image->width); // unsigned int(32) image_width;
- avifRWStreamWriteU32(&s, item->image->height); // unsigned int(32) image_height;
+ avifBoxMarker ispe = avifRWStreamWriteFullBox(&s, "ispe", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, imageMetadata->width); // unsigned int(32) image_width;
+ avifRWStreamWriteU32(&s, imageMetadata->height); // unsigned int(32) image_height;
avifRWStreamFinishBox(&s, ispe);
ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_FALSE); // ipma is 1-indexed, doing this afterwards is correct
- uint8_t channelCount = item->alpha ? 1 : 3; // TODO: write the correct value here when adding monochrome support
- avifBoxMarker pixi = avifRWStreamWriteBox(&s, "pixi", 0, 0);
+ uint8_t channelCount = (item->alpha || (imageMetadata->yuvFormat == AVIF_PIXEL_FORMAT_YUV400)) ? 1 : 3;
+ avifBoxMarker pixi = avifRWStreamWriteFullBox(&s, "pixi", AVIF_BOX_SIZE_TBD, 0, 0);
avifRWStreamWriteU8(&s, channelCount); // unsigned int (8) num_channels;
for (uint8_t chan = 0; chan < channelCount; ++chan) {
- avifRWStreamWriteU8(&s, (uint8_t)item->image->depth); // unsigned int (8) bits_per_channel;
+ avifRWStreamWriteU8(&s, (uint8_t)imageMetadata->depth); // unsigned int (8) bits_per_channel;
}
avifRWStreamFinishBox(&s, pixi);
ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_FALSE);
@@ -390,71 +689,19 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
if (item->alpha) {
// Alpha specific properties
- avifBoxMarker auxC = avifRWStreamWriteBox(&s, "auxC", 0, 0);
+ avifBoxMarker auxC = avifRWStreamWriteFullBox(&s, "auxC", AVIF_BOX_SIZE_TBD, 0, 0);
avifRWStreamWriteChars(&s, alphaURN, alphaURNSize); // string aux_type;
avifRWStreamFinishBox(&s, auxC);
ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_FALSE);
} else {
// Color specific properties
- if (item->image->icc.data && (item->image->icc.size > 0)) {
- avifBoxMarker colr = avifRWStreamWriteBox(&s, "colr", -1, 0);
- avifRWStreamWriteChars(&s, "prof", 4); // unsigned int(32) colour_type;
- avifRWStreamWrite(&s, item->image->icc.data, item->image->icc.size);
- avifRWStreamFinishBox(&s, colr);
- ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_FALSE);
- } else {
- avifBoxMarker colr = avifRWStreamWriteBox(&s, "colr", -1, 0);
- avifRWStreamWriteChars(&s, "nclx", 4); // unsigned int(32) colour_type;
- avifRWStreamWriteU16(&s, (uint16_t)item->image->colorPrimaries); // unsigned int(16) colour_primaries;
- avifRWStreamWriteU16(&s, (uint16_t)item->image->transferCharacteristics); // unsigned int(16) transfer_characteristics;
- avifRWStreamWriteU16(&s, (uint16_t)item->image->matrixCoefficients); // unsigned int(16) matrix_coefficients;
- avifRWStreamWriteU8(&s, item->image->yuvRange & 0x80); // unsigned int(1) full_range_flag;
- // unsigned int(7) reserved = 0;
- avifRWStreamFinishBox(&s, colr);
- ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_FALSE);
- }
-
- // Write (Optional) Transformations
- if (item->image->transformFlags & AVIF_TRANSFORM_PASP) {
- avifBoxMarker pasp = avifRWStreamWriteBox(&s, "pasp", -1, 0);
- avifRWStreamWriteU32(&s, item->image->pasp.hSpacing); // unsigned int(32) hSpacing;
- avifRWStreamWriteU32(&s, item->image->pasp.vSpacing); // unsigned int(32) vSpacing;
- avifRWStreamFinishBox(&s, pasp);
- ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_FALSE);
- }
- if (item->image->transformFlags & AVIF_TRANSFORM_CLAP) {
- avifBoxMarker clap = avifRWStreamWriteBox(&s, "clap", -1, 0);
- avifRWStreamWriteU32(&s, item->image->clap.widthN); // unsigned int(32) cleanApertureWidthN;
- avifRWStreamWriteU32(&s, item->image->clap.widthD); // unsigned int(32) cleanApertureWidthD;
- avifRWStreamWriteU32(&s, item->image->clap.heightN); // unsigned int(32) cleanApertureHeightN;
- avifRWStreamWriteU32(&s, item->image->clap.heightD); // unsigned int(32) cleanApertureHeightD;
- avifRWStreamWriteU32(&s, item->image->clap.horizOffN); // unsigned int(32) horizOffN;
- avifRWStreamWriteU32(&s, item->image->clap.horizOffD); // unsigned int(32) horizOffD;
- avifRWStreamWriteU32(&s, item->image->clap.vertOffN); // unsigned int(32) vertOffN;
- avifRWStreamWriteU32(&s, item->image->clap.vertOffD); // unsigned int(32) vertOffD;
- avifRWStreamFinishBox(&s, clap);
- ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_TRUE);
- }
- if (item->image->transformFlags & AVIF_TRANSFORM_IROT) {
- avifBoxMarker irot = avifRWStreamWriteBox(&s, "irot", -1, 0);
- uint8_t angle = item->image->irot.angle & 0x3;
- avifRWStreamWrite(&s, &angle, 1); // unsigned int (6) reserved = 0; unsigned int (2) angle;
- avifRWStreamFinishBox(&s, irot);
- ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_TRUE);
- }
- if (item->image->transformFlags & AVIF_TRANSFORM_IMIR) {
- avifBoxMarker imir = avifRWStreamWriteBox(&s, "imir", -1, 0);
- uint8_t axis = item->image->imir.axis & 0x1;
- avifRWStreamWrite(&s, &axis, 1); // unsigned int (7) reserved = 0; unsigned int (1) axis;
- avifRWStreamFinishBox(&s, imir);
- ipmaPush(&item->ipma, ++itemPropertyIndex, AVIF_TRUE);
- }
+ avifEncoderWriteColorProperties(&s, imageMetadata, &item->ipma, &itemPropertyIndex);
}
}
avifRWStreamFinishBox(&s, ipco);
- avifBoxMarker ipma = avifRWStreamWriteBox(&s, "ipma", 0, 0);
+ avifBoxMarker ipma = avifRWStreamWriteFullBox(&s, "ipma", AVIF_BOX_SIZE_TBD, 0, 0);
{
int ipmaCount = 0;
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
@@ -492,22 +739,236 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
avifRWStreamFinishBox(&s, meta);
// -----------------------------------------------------------------------
+ // Write tracks (if an image sequence)
+
+ if (encoder->data->frames.count > 1) {
+ static const uint32_t unityMatrix[9] = { 0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000 };
+
+ uint64_t durationInTimescales = 0;
+ for (uint32_t frameIndex = 0; frameIndex < encoder->data->frames.count; ++frameIndex) {
+ const avifEncoderFrame * frame = &encoder->data->frames.frame[frameIndex];
+ durationInTimescales += frame->durationInTimescales;
+ }
+
+ // -------------------------------------------------------------------
+ // Start moov
+
+ avifBoxMarker moov = avifRWStreamWriteBox(&s, "moov", AVIF_BOX_SIZE_TBD);
+
+ avifBoxMarker mvhd = avifRWStreamWriteFullBox(&s, "mvhd", AVIF_BOX_SIZE_TBD, 1, 0);
+ avifRWStreamWriteU64(&s, now); // unsigned int(64) creation_time;
+ avifRWStreamWriteU64(&s, now); // unsigned int(64) modification_time;
+ avifRWStreamWriteU32(&s, (uint32_t)encoder->timescale); // unsigned int(32) timescale;
+ avifRWStreamWriteU64(&s, durationInTimescales); // unsigned int(64) duration;
+ avifRWStreamWriteU32(&s, 0x00010000); // template int(32) rate = 0x00010000; // typically 1.0
+ avifRWStreamWriteU16(&s, 0x0100); // template int(16) volume = 0x0100; // typically, full volume
+ avifRWStreamWriteU16(&s, 0); // const bit(16) reserved = 0;
+ avifRWStreamWriteZeros(&s, 8); // const unsigned int(32)[2] reserved = 0;
+ avifRWStreamWrite(&s, unityMatrix, sizeof(unityMatrix));
+ avifRWStreamWriteZeros(&s, 24); // bit(32)[6] pre_defined = 0;
+ avifRWStreamWriteU32(&s, encoder->data->items.count); // unsigned int(32) next_track_ID;
+ avifRWStreamFinishBox(&s, mvhd);
+
+ // -------------------------------------------------------------------
+ // Write tracks
+
+ for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
+ avifEncoderItem * item = &encoder->data->items.item[itemIndex];
+ if (item->encodeOutput->samples.count == 0) {
+ continue;
+ }
+
+ uint32_t syncSamplesCount = 0;
+ for (uint32_t sampleIndex = 0; sampleIndex < item->encodeOutput->samples.count; ++sampleIndex) {
+ avifEncodeSample * sample = &item->encodeOutput->samples.sample[sampleIndex];
+ if (sample->sync) {
+ ++syncSamplesCount;
+ }
+ }
+
+ avifBoxMarker trak = avifRWStreamWriteBox(&s, "trak", AVIF_BOX_SIZE_TBD);
+
+ avifBoxMarker tkhd = avifRWStreamWriteFullBox(&s, "tkhd", AVIF_BOX_SIZE_TBD, 1, 1);
+ avifRWStreamWriteU64(&s, now); // unsigned int(64) creation_time;
+ avifRWStreamWriteU64(&s, now); // unsigned int(64) modification_time;
+ avifRWStreamWriteU32(&s, itemIndex + 1); // unsigned int(32) track_ID;
+ avifRWStreamWriteU32(&s, 0); // const unsigned int(32) reserved = 0;
+ avifRWStreamWriteU64(&s, durationInTimescales); // unsigned int(64) duration;
+ avifRWStreamWriteZeros(&s, sizeof(uint32_t) * 2); // const unsigned int(32)[2] reserved = 0;
+ avifRWStreamWriteU16(&s, 0); // template int(16) layer = 0;
+ avifRWStreamWriteU16(&s, 0); // template int(16) alternate_group = 0;
+ avifRWStreamWriteU16(&s, 0); // template int(16) volume = {if track_is_audio 0x0100 else 0};
+ avifRWStreamWriteU16(&s, 0); // const unsigned int(16) reserved = 0;
+ avifRWStreamWrite(&s, unityMatrix, sizeof(unityMatrix)); // template int(32)[9] matrix= // { 0x00010000,0,0,0,0x00010000,0,0,0,0x40000000 };
+ avifRWStreamWriteU32(&s, imageMetadata->width << 16); // unsigned int(32) width;
+ avifRWStreamWriteU32(&s, imageMetadata->height << 16); // unsigned int(32) height;
+ avifRWStreamFinishBox(&s, tkhd);
+
+ if (item->irefToID != 0) {
+ avifBoxMarker tref = avifRWStreamWriteBox(&s, "tref", AVIF_BOX_SIZE_TBD);
+ avifBoxMarker refType = avifRWStreamWriteBox(&s, item->irefType, AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteU32(&s, (uint32_t)item->irefToID);
+ avifRWStreamFinishBox(&s, refType);
+ avifRWStreamFinishBox(&s, tref);
+ }
+
+ if (!item->alpha) {
+ avifEncoderWriteTrackMetaBox(encoder, &s);
+ }
+
+ avifBoxMarker mdia = avifRWStreamWriteBox(&s, "mdia", AVIF_BOX_SIZE_TBD);
+
+ avifBoxMarker mdhd = avifRWStreamWriteFullBox(&s, "mdhd", AVIF_BOX_SIZE_TBD, 1, 0);
+ avifRWStreamWriteU64(&s, now); // unsigned int(64) creation_time;
+ avifRWStreamWriteU64(&s, now); // unsigned int(64) modification_time;
+ avifRWStreamWriteU32(&s, (uint32_t)encoder->timescale); // unsigned int(32) timescale;
+ avifRWStreamWriteU64(&s, durationInTimescales); // unsigned int(64) duration;
+ avifRWStreamWriteU16(&s, 21956); // bit(1) pad = 0; unsigned int(5)[3] language; ("und")
+ avifRWStreamWriteU16(&s, 0); // unsigned int(16) pre_defined = 0;
+ avifRWStreamFinishBox(&s, mdhd);
+
+ avifBoxMarker hdlrTrak = avifRWStreamWriteFullBox(&s, "hdlr", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, 0); // unsigned int(32) pre_defined = 0;
+ avifRWStreamWriteChars(&s, "pict", 4); // unsigned int(32) handler_type;
+ avifRWStreamWriteZeros(&s, 12); // const unsigned int(32)[3] reserved = 0;
+ avifRWStreamWriteChars(&s, "libavif", 8); // string name; (writing null terminator)
+ avifRWStreamFinishBox(&s, hdlrTrak);
+
+ avifBoxMarker minf = avifRWStreamWriteBox(&s, "minf", AVIF_BOX_SIZE_TBD);
+
+ avifBoxMarker vmhd = avifRWStreamWriteFullBox(&s, "vmhd", AVIF_BOX_SIZE_TBD, 0, 1);
+ avifRWStreamWriteU16(&s, 0); // template unsigned int(16) graphicsmode = 0; (copy over the existing image)
+ avifRWStreamWriteZeros(&s, 6); // template unsigned int(16)[3] opcolor = {0, 0, 0};
+ avifRWStreamFinishBox(&s, vmhd);
+
+ avifBoxMarker dinf = avifRWStreamWriteBox(&s, "dinf", AVIF_BOX_SIZE_TBD);
+ avifBoxMarker dref = avifRWStreamWriteFullBox(&s, "dref", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) entry_count;
+ avifRWStreamWriteFullBox(&s, "url ", 0, 0, 1); // flags:1 means data is in this file
+ avifRWStreamFinishBox(&s, dref);
+ avifRWStreamFinishBox(&s, dinf);
+
+ avifBoxMarker stbl = avifRWStreamWriteBox(&s, "stbl", AVIF_BOX_SIZE_TBD);
+
+ avifBoxMarker stco = avifRWStreamWriteFullBox(&s, "stco", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) entry_count;
+ avifEncoderItemAddMdatFixup(item, &s); //
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) chunk_offset; (set later)
+ avifRWStreamFinishBox(&s, stco);
+
+ avifBoxMarker stsc = avifRWStreamWriteFullBox(&s, "stsc", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) entry_count;
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) first_chunk;
+ avifRWStreamWriteU32(&s, item->encodeOutput->samples.count); // unsigned int(32) samples_per_chunk;
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) sample_description_index;
+ avifRWStreamFinishBox(&s, stsc);
+
+ avifBoxMarker stsz = avifRWStreamWriteFullBox(&s, "stsz", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, 0); // unsigned int(32) sample_size;
+ avifRWStreamWriteU32(&s, item->encodeOutput->samples.count); // unsigned int(32) sample_count;
+ for (uint32_t sampleIndex = 0; sampleIndex < item->encodeOutput->samples.count; ++sampleIndex) {
+ avifEncodeSample * sample = &item->encodeOutput->samples.sample[sampleIndex];
+ avifRWStreamWriteU32(&s, (uint32_t)sample->data.size); // unsigned int(32) entry_size;
+ }
+ avifRWStreamFinishBox(&s, stsz);
+
+ avifBoxMarker stss = avifRWStreamWriteFullBox(&s, "stss", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, syncSamplesCount); // unsigned int(32) entry_count;
+ for (uint32_t sampleIndex = 0; sampleIndex < item->encodeOutput->samples.count; ++sampleIndex) {
+ avifEncodeSample * sample = &item->encodeOutput->samples.sample[sampleIndex];
+ if (sample->sync) {
+ avifRWStreamWriteU32(&s, sampleIndex + 1); // unsigned int(32) sample_number;
+ }
+ }
+ avifRWStreamFinishBox(&s, stss);
+
+ avifBoxMarker stts = avifRWStreamWriteFullBox(&s, "stts", AVIF_BOX_SIZE_TBD, 0, 0);
+ size_t sttsEntryCountOffset = avifRWStreamOffset(&s);
+ uint32_t sttsEntryCount = 0;
+ avifRWStreamWriteU32(&s, 0); // unsigned int(32) entry_count;
+ for (uint32_t sampleCount = 0, frameIndex = 0; frameIndex < encoder->data->frames.count; ++frameIndex) {
+ avifEncoderFrame * frame = &encoder->data->frames.frame[frameIndex];
+ ++sampleCount;
+ if (frameIndex < (encoder->data->frames.count - 1)) {
+ avifEncoderFrame * nextFrame = &encoder->data->frames.frame[frameIndex + 1];
+ if (frame->durationInTimescales == nextFrame->durationInTimescales) {
+ continue;
+ }
+ }
+ avifRWStreamWriteU32(&s, sampleCount); // unsigned int(32) sample_count;
+ avifRWStreamWriteU32(&s, (uint32_t)frame->durationInTimescales); // unsigned int(32) sample_delta;
+ sampleCount = 0;
+ ++sttsEntryCount;
+ }
+ size_t prevOffset = avifRWStreamOffset(&s);
+ avifRWStreamSetOffset(&s, sttsEntryCountOffset);
+ avifRWStreamWriteU32(&s, sttsEntryCount);
+ avifRWStreamSetOffset(&s, prevOffset);
+ avifRWStreamFinishBox(&s, stts);
+
+ avifBoxMarker stsd = avifRWStreamWriteFullBox(&s, "stsd", AVIF_BOX_SIZE_TBD, 0, 0);
+ avifRWStreamWriteU32(&s, 1); // unsigned int(32) entry_count;
+ avifBoxMarker av01 = avifRWStreamWriteBox(&s, "av01", AVIF_BOX_SIZE_TBD);
+ avifRWStreamWriteZeros(&s, 6); // const unsigned int(8)[6] reserved = 0;
+ avifRWStreamWriteU16(&s, 1); // unsigned int(16) data_reference_index;
+ avifRWStreamWriteU16(&s, 0); // unsigned int(16) pre_defined = 0;
+ avifRWStreamWriteU16(&s, 0); // const unsigned int(16) reserved = 0;
+ avifRWStreamWriteZeros(&s, sizeof(uint32_t) * 3); // unsigned int(32)[3] pre_defined = 0;
+ avifRWStreamWriteU16(&s, (uint16_t)imageMetadata->width); // unsigned int(16) width;
+ avifRWStreamWriteU16(&s, (uint16_t)imageMetadata->height); // unsigned int(16) height;
+ avifRWStreamWriteU32(&s, 0x00480000); // template unsigned int(32) horizresolution
+ avifRWStreamWriteU32(&s, 0x00480000); // template unsigned int(32) vertresolution
+ avifRWStreamWriteU32(&s, 0); // const unsigned int(32) reserved = 0;
+ avifRWStreamWriteU16(&s, 1); // template unsigned int(16) frame_count = 1;
+ avifRWStreamWriteChars(&s, "\012AOM Coding", 11); // string[32] compressorname;
+ avifRWStreamWriteZeros(&s, 32 - 11); //
+ avifRWStreamWriteU16(&s, 0x0018); // template unsigned int(16) depth = 0x0018;
+ avifRWStreamWriteU16(&s, (uint16_t)0xffff); // int(16) pre_defined = -1;
+ writeConfigBox(&s, &item->codec->configBox);
+ if (!item->alpha) {
+ avifEncoderWriteColorProperties(&s, imageMetadata, NULL, NULL);
+ }
+ avifRWStreamFinishBox(&s, av01);
+ avifRWStreamFinishBox(&s, stsd);
+
+ avifRWStreamFinishBox(&s, stbl);
+
+ avifRWStreamFinishBox(&s, minf);
+ avifRWStreamFinishBox(&s, mdia);
+ avifRWStreamFinishBox(&s, trak);
+ }
+
+ // -------------------------------------------------------------------
+ // Finish moov box
+
+ avifRWStreamFinishBox(&s, moov);
+ }
+
+ // -----------------------------------------------------------------------
// Write mdat
- avifBoxMarker mdat = avifRWStreamWriteBox(&s, "mdat", -1, 0);
+ avifBoxMarker mdat = avifRWStreamWriteBox(&s, "mdat", AVIF_BOX_SIZE_TBD);
for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
avifEncoderItem * item = &encoder->data->items.item[itemIndex];
- if (item->content.size == 0) {
+ if ((item->metadataPayload.size == 0) && (item->encodeOutput->samples.count == 0)) {
continue;
}
- uint32_t infeOffset = (uint32_t)s.offset;
- avifRWStreamWrite(&s, item->content.data, item->content.size);
+ uint32_t chunkOffset = (uint32_t)avifRWStreamOffset(&s);
+ if (item->encodeOutput->samples.count > 0) {
+ for (uint32_t sampleIndex = 0; sampleIndex < item->encodeOutput->samples.count; ++sampleIndex) {
+ avifEncodeSample * sample = &item->encodeOutput->samples.sample[sampleIndex];
+ avifRWStreamWrite(&s, sample->data.data, sample->data.size);
+ }
+ } else {
+ avifRWStreamWrite(&s, item->metadataPayload.data, item->metadataPayload.size);
+ }
- if (item->infeOffsetOffset != 0) {
+ for (uint32_t fixupIndex = 0; fixupIndex < item->mdatFixups.count; ++fixupIndex) {
+ avifOffsetFixup * fixup = &item->mdatFixups.fixup[fixupIndex];
size_t prevOffset = avifRWStreamOffset(&s);
- avifRWStreamSetOffset(&s, item->infeOffsetOffset);
- avifRWStreamWriteU32(&s, infeOffset);
+ avifRWStreamSetOffset(&s, fixup->offset);
+ avifRWStreamWriteU32(&s, chunkOffset);
avifRWStreamSetOffset(&s, prevOffset);
}
}
@@ -518,16 +979,19 @@ avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRWData
avifRWStreamFinishWrite(&s);
- // -----------------------------------------------------------------------
- // Set result and cleanup
-
- result = AVIF_RESULT_OK;
+ return AVIF_RESULT_OK;
+}
-writeCleanup:
- return result;
+avifResult avifEncoderWrite(avifEncoder * encoder, const avifImage * image, avifRWData * output)
+{
+ avifResult addImageResult = avifEncoderAddImage(encoder, image, 1, AVIF_ADD_IMAGE_FLAG_SINGLE);
+ if (addImageResult != AVIF_RESULT_OK) {
+ return addImageResult;
+ }
+ return avifEncoderFinish(encoder, output);
}
-static avifBool avifImageIsOpaque(avifImage * image)
+static avifBool avifImageIsOpaque(const avifImage * image)
{
if (!image->alphaPlane) {
return AVIF_TRUE;
@@ -555,7 +1019,7 @@ static avifBool avifImageIsOpaque(avifImage * image)
return AVIF_TRUE;
}
-static void fillConfigBox(avifCodec * codec, avifImage * image, avifBool alpha)
+static void fillConfigBox(avifCodec * codec, const avifImage * image, avifBool alpha)
{
avifPixelFormatInfo formatInfo;
avifGetPixelFormatInfo(image->yuvFormat, &formatInfo);
@@ -584,7 +1048,7 @@ static void fillConfigBox(avifCodec * codec, avifImage * image, avifBool alpha)
case AVIF_PIXEL_FORMAT_YUV420:
seqProfile = 0;
break;
- case AVIF_PIXEL_FORMAT_YV12:
+ case AVIF_PIXEL_FORMAT_YUV400:
seqProfile = 0;
break;
case AVIF_PIXEL_FORMAT_NONE:
@@ -605,23 +1069,17 @@ static void fillConfigBox(avifCodec * codec, avifImage * image, avifBool alpha)
codec->configBox.seqProfile = seqProfile;
codec->configBox.seqLevelIdx0 = seqLevelIdx0;
codec->configBox.seqTier0 = 0;
- codec->configBox.highBitdepth = (image->depth > 8) ? 1 : 0;
- codec->configBox.twelveBit = (image->depth == 12) ? 1 : 0;
- codec->configBox.monochrome = alpha ? 1 : 0;
+ codec->configBox.highBitdepth = (image->depth > 8);
+ codec->configBox.twelveBit = (image->depth == 12);
+ codec->configBox.monochrome = (alpha || (image->yuvFormat == AVIF_PIXEL_FORMAT_YUV400));
codec->configBox.chromaSubsamplingX = (uint8_t)formatInfo.chromaShiftX;
codec->configBox.chromaSubsamplingY = (uint8_t)formatInfo.chromaShiftY;
-
- // TODO: choose the correct one from below:
- // * 0 - CSP_UNKNOWN Unknown (in this case the source video transfer function must be signaled outside the AV1 bitstream)
- // * 1 - CSP_VERTICAL Horizontally co-located with (0, 0) luma sample, vertical position in the middle between two luma samples
- // * 2 - CSP_COLOCATED co-located with (0, 0) luma sample
- // * 3 - CSP_RESERVED
- codec->configBox.chromaSamplePosition = 0;
+ codec->configBox.chromaSamplePosition = image->yuvChromaSamplePosition;
}
static void writeConfigBox(avifRWStream * s, avifCodecConfigurationBox * cfg)
{
- avifBoxMarker av1C = avifRWStreamWriteBox(s, "av1C", -1, 0);
+ avifBoxMarker av1C = avifRWStreamWriteBox(s, "av1C", AVIF_BOX_SIZE_TBD);
// unsigned int (1) marker = 1;
// unsigned int (7) version = 1;