author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2023-07-07 04:52:33 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2023-07-07 04:52:33 +0000
commit     895b768a259284031c1dd1adf6e12ce233ef3f21 (patch)
tree       5939537f1b684b149aed3b8fab8ab4711830d566
parent     a6a4701f6f7030582edf1bcd4964ab51b4824063 (diff)
parent     b2c0c512f112c0bb018b20a67b3e14755bf08746 (diff)
download   libwebm-android14-mainline-media-swcodec-release.tar.gz
Snap for 10453563 from b2c0c512f112c0bb018b20a67b3e14755bf08746 to mainline-media-swcodec-release
tags: aml_swc_341312300, aml_swc_341312020, aml_swc_341111000, aml_swc_341011020, aml_swc_340922010, android14-mainline-media-swcodec-release
Change-Id: I42f259e5e1499cae78a87f04b6ec4299789babeb
-rw-r--r--  .pylintrc                               441
-rw-r--r--  .style.yapf                               2
-rw-r--r--  AUTHORS.TXT                               1
-rw-r--r--  Android.bp                               25
-rw-r--r--  Android.mk                               23
-rw-r--r--  CMakeLists.txt                            6
-rw-r--r--  CONTRIBUTING.md                          29
-rw-r--r--  METADATA                                  8
-rw-r--r--  PRESUBMIT.py                            202
-rw-r--r--  build/cxx_flags.cmake                     1
-rw-r--r--  build/msvc_runtime.cmake                  1
-rw-r--r--  common/vp9_header_parser.cc              11
-rw-r--r--  common/vp9_header_parser.h                4
-rw-r--r--  common/vp9_level_stats.cc                32
-rw-r--r--  common/vp9_level_stats.h                  9
-rw-r--r--  common/vp9_level_stats_tests.cc           7
-rw-r--r--  infra/README.md                          23
-rw-r--r--  infra/common.sh                          65
-rwxr-xr-x  infra/compile.sh                        128
-rwxr-xr-x  infra/compile_android.sh                 80
-rwxr-xr-x  infra/run_unit_tests.sh                 200
-rw-r--r--  m2ts/webm2pes.cc                          2
-rw-r--r--  mkvmuxer/mkvmuxer.cc                     10
-rw-r--r--  mkvmuxer/mkvmuxer.h                       8
-rw-r--r--  mkvmuxer/mkvmuxerutil.cc                  2
-rw-r--r--  mkvmuxer_sample.cc                        2
-rw-r--r--  mkvparser/mkvparser.cc                   84
-rw-r--r--  testing/mkvparser_fuzzer.cc             160
-rw-r--r--  webm_info.cc                             11
-rw-r--r--  webm_parser/include/webm/file_reader.h   13
-rw-r--r--  webm_parser/include/webm/status.h         5
-rw-r--r--  webm_parser/src/file_reader.cc           24
32 files changed, 1535 insertions(+), 84 deletions(-)
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..4658b84
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,441 @@
+# This Pylint rcfile contains a best-effort configuration to uphold the
+# best-practices and style described in the Google Python style guide:
+# https://google.github.io/styleguide/pyguide.html
+#
+# Its canonical open-source location is:
+# https://google.github.io/styleguide/pylintrc
+
+[MASTER]
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=third_party
+
+# Files or directories matching the regex patterns are skipped. The regex
+# matches against base names, not paths.
+ignore-patterns=
+
+# Pickle collected data for later comparisons.
+persistent=no
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Use multiple processes to speed up Pylint.
+jobs=4
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+#enable=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W"
+disable=abstract-method,
+ apply-builtin,
+ arguments-differ,
+ attribute-defined-outside-init,
+ backtick,
+ bad-option-value,
+ basestring-builtin,
+ buffer-builtin,
+ c-extension-no-member,
+ consider-using-enumerate,
+ cmp-builtin,
+ cmp-method,
+ coerce-builtin,
+ coerce-method,
+ delslice-method,
+ div-method,
+ duplicate-code,
+ eq-without-hash,
+ execfile-builtin,
+ file-builtin,
+ filter-builtin-not-iterating,
+ fixme,
+ getslice-method,
+ global-statement,
+ hex-method,
+ idiv-method,
+ implicit-str-concat-in-sequence,
+ import-error,
+ import-self,
+ import-star-module-level,
+ inconsistent-return-statements,
+ input-builtin,
+ intern-builtin,
+ invalid-str-codec,
+ locally-disabled,
+ long-builtin,
+ long-suffix,
+ map-builtin-not-iterating,
+ misplaced-comparison-constant,
+ missing-function-docstring,
+ metaclass-assignment,
+ next-method-called,
+ next-method-defined,
+ no-absolute-import,
+ no-else-break,
+ no-else-continue,
+ no-else-raise,
+ no-else-return,
+ no-init, # added
+ no-member,
+ no-name-in-module,
+ no-self-use,
+ nonzero-method,
+ oct-method,
+ old-division,
+ old-ne-operator,
+ old-octal-literal,
+ old-raise-syntax,
+ parameter-unpacking,
+ print-statement,
+ raising-string,
+ range-builtin-not-iterating,
+ raw_input-builtin,
+ rdiv-method,
+ reduce-builtin,
+ relative-import,
+ reload-builtin,
+ round-builtin,
+ setslice-method,
+ signature-differs,
+ standarderror-builtin,
+ suppressed-message,
+ sys-max-int,
+ too-few-public-methods,
+ too-many-ancestors,
+ too-many-arguments,
+ too-many-boolean-expressions,
+ too-many-branches,
+ too-many-instance-attributes,
+ too-many-locals,
+ too-many-nested-blocks,
+ too-many-public-methods,
+ too-many-return-statements,
+ too-many-statements,
+ trailing-newlines,
+ unichr-builtin,
+ unicode-builtin,
+ unnecessary-pass,
+ unpacking-in-except,
+ useless-else-on-loop,
+ useless-object-inheritance,
+ useless-suppression,
+ using-cmp-argument,
+ wrong-import-order,
+ xrange-builtin,
+ zip-builtin-not-iterating,
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]". This option is deprecated
+# and it will be removed in Pylint 2.0.
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[BASIC]
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=main,_,PRESUBMIT
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl
+
+# Regular expression matching correct function names
+function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
+
+# Regular expression matching correct variable names
+variable-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct constant names
+const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression matching correct attribute names
+attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
+
+# Regular expression matching correct argument names
+argument-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct class names
+class-rgx=^_?[A-Z][a-zA-Z0-9]*$
+
+# Regular expression matching correct module names
+module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$
+
+# Regular expression matching correct method names
+method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=10
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis. It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=80
+
+# TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt
+# lines made too long by directives to pytype.
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=(?x)(
+ ^\s*(\#\ )?<?https?://\S+>?$|
+ ^\s*(from\s+\S+\s+)?import\s+.+$)
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=yes
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=
+
+# Maximum number of lines in a module
+max-module-lines=99999
+
+# String used as indentation unit. The internal Google style guide mandates 2
+# spaces. Google's externally-published style guide says 4, consistent with
+# PEP 8. Here, we use 2 spaces, for conformity with many open-sourced Google
+# projects (like TensorFlow).
+indent-string=' '
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=TODO
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=yes
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging,absl.logging,tensorflow.io.logging
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,
+ TERMIOS,
+ Bastion,
+ rexec,
+ sets
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant, absl
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls,
+ class_
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=StandardError,
+ Exception,
+ BaseException
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 0000000..47ca4cc
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = chromium
\ No newline at end of file
diff --git a/AUTHORS.TXT b/AUTHORS.TXT
index 9686ac1..59b648c 100644
--- a/AUTHORS.TXT
+++ b/AUTHORS.TXT
@@ -2,3 +2,4 @@
# Name or Organization <email address>
Google Inc.
+Elijah Cirioli <eli.cirioli@gmail.com>
diff --git a/Android.bp b/Android.bp
index 4941a74..b68d2dd 100644
--- a/Android.bp
+++ b/Android.bp
@@ -57,3 +57,28 @@ cc_library {
},
},
}
+
+cc_library_static {
+ name: "libwebm_mkvparser",
+ host_supported: true,
+ export_include_dirs: ["."],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+ srcs: ["mkvparser/mkvparser.cc"],
+ sanitize: {
+ // Enable CFI if this becomes a shared library.
+ // cfi: true,
+ },
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
diff --git a/Android.mk b/Android.mk
new file mode 100644
index 0000000..e6c17df
--- /dev/null
+++ b/Android.mk
@@ -0,0 +1,23 @@
+# Ignore this file during non-NDK builds.
+ifdef NDK_ROOT
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libwebm
+LOCAL_CPPFLAGS:=-D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS
+LOCAL_CPPFLAGS+=-D__STDC_LIMIT_MACROS -std=c++11
+LOCAL_C_INCLUDES:= $(LOCAL_PATH)
+LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)
+
+LOCAL_SRC_FILES:= common/file_util.cc \
+ common/hdr_util.cc \
+ mkvparser/mkvparser.cc \
+ mkvparser/mkvreader.cc \
+ mkvmuxer/mkvmuxer.cc \
+ mkvmuxer/mkvmuxerutil.cc \
+ mkvmuxer/mkvwriter.cc
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-BSD
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := $(LOCAL_PATH)/LICENSE.TXT $(LOCAL_PATH)/PATENTS.TXT
+include $(BUILD_STATIC_LIBRARY)
+endif # NDK_ROOT
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9fa5a53..99e904f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -25,7 +25,8 @@ option(ENABLE_IWYU "Enables include-what-you-use support." OFF)
option(ENABLE_WERROR "Enable warnings as errors." OFF)
option(ENABLE_WEBM_PARSER "Enables new parser API." OFF)
-if(WIN32)
+if(WIN32 OR CYGWIN OR MSYS)
+ # Allow use of rand_r() / fdopen() and other POSIX functions.
require_cxx_flag_nomsvc("-std=gnu++11")
else()
require_cxx_flag_nomsvc("-std=c++11")
@@ -47,6 +48,9 @@ if (MSVC)
endif ()
else ()
add_cxx_flag_if_supported("-Wall")
+ add_cxx_flag_if_supported("-Wc++14-compat")
+ add_cxx_flag_if_supported("-Wc++17-compat")
+ add_cxx_flag_if_supported("-Wc++20-compat")
add_cxx_flag_if_supported("-Wextra")
add_cxx_flag_if_supported("-Wnarrowing")
add_cxx_flag_if_supported("-Wno-deprecated")
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..7a73a30
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,29 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use a [Gerrit](https://www.gerritcodereview.com) instance hosted at
+https://chromium-review.googlesource.com for this purpose. See the
+[WebM Project page](https://www.webmproject.org/code/contribute/submitting-patches/)
+for additional details.
+
+## Community Guidelines
+
+This project follows
+[Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
diff --git a/METADATA b/METADATA
index 44ea270..f2b0e26 100644
--- a/METADATA
+++ b/METADATA
@@ -9,11 +9,11 @@ third_party {
type: GIT
value: "https://chromium.googlesource.com/webm/libwebm"
}
- version: "485fb67b324aec5298765e899dc054459d3946e5"
+ version: "1930e3ca23b007f3ff11d98a570077be6201957e"
license_type: NOTICE
last_upgrade_date {
- year: 2020
- month: 11
- day: 24
+ year: 2023
+ month: 3
+ day: 20
}
}
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
new file mode 100644
index 0000000..58be26d
--- /dev/null
+++ b/PRESUBMIT.py
@@ -0,0 +1,202 @@
+# Copyright (c) 2021, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in
+# the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the name of Google nor the names of its contributors may
+# be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Top-level presubmit script for libwebm.
+
+See https://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+import re
+import subprocess2
+
+USE_PYTHON3 = True
+_BASH_INDENTATION = "2"
+_GIT_COMMIT_SUBJECT_LENGTH = 65
+_INCLUDE_BASH_FILES_ONLY = [r".*\.sh$"]
+_INCLUDE_SOURCE_FILES_ONLY = [r".*\.(c|cc|[hc]pp|h)$"]
+_LIBWEBM_MAX_LINE_LENGTH = 80
+
+
+def _CheckCommitSubjectLength(input_api, output_api):
+ """Ensures commit's subject length is no longer than 65 chars."""
+ name = "git-commit subject"
+ cmd = ["git", "log", "-1", "--pretty=%s"]
+ start = input_api.time.time()
+ proc = subprocess2.Popen(
+ cmd,
+ stderr=subprocess2.PIPE,
+ stdout=subprocess2.PIPE,
+ universal_newlines=True)
+
+ stdout, _ = proc.communicate()
+ duration = input_api.time.time() - start
+
+ if not re.match(r"^Revert",
+ stdout) and (len(stdout) - 1) > _GIT_COMMIT_SUBJECT_LENGTH:
+ failure_msg = (
+ "The commit subject: %s is too long (%d chars)\n"
+ "Try to keep this to 50 or less (up to 65 is permitted for "
+ "non-reverts).\n"
+ "https://www.git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-"
+ "Project#_commit_guidelines") % (stdout, len(stdout) - 1)
+ return output_api.PresubmitError("%s\n (%4.2fs) failed\n%s" %
+ (name, duration, failure_msg))
+
+ return output_api.PresubmitResult("%s\n (%4.2fs) success" % (name, duration))
+
+
+def _GetFilesToSkip(input_api):
+ """Skips libwebm-specific files."""
+ return list(input_api.DEFAULT_FILES_TO_SKIP) + [
+ r"\.pylintrc$",
+ ]
+
+
+def _CheckChangeLintsClean(input_api, output_api):
+ """Makes sure that libwebm/ code is cpplint clean."""
+ sources = lambda x: input_api.FilterSourceFile(
+ x, files_to_check=_INCLUDE_SOURCE_FILES_ONLY, files_to_skip=None)
+ return input_api.canned_checks.CheckChangeLintsClean(input_api, output_api,
+ sources)
+
+
+def _RunShellCheckCmd(input_api, output_api, bash_file):
+ """shellcheck command wrapper."""
+ cmd = ["shellcheck", "-x", "-oall", "-sbash", bash_file]
+ name = "Check %s file." % bash_file
+ start = input_api.time.time()
+ output, rc = subprocess2.communicate(
+ cmd, stdout=None, stderr=subprocess2.PIPE, universal_newlines=True)
+ duration = input_api.time.time() - start
+ if rc == 0:
+ return output_api.PresubmitResult("%s\n%s (%4.2fs)\n" %
+ (name, " ".join(cmd), duration))
+ return output_api.PresubmitError("%s\n%s (%4.2fs) failed\n%s" %
+ (name, " ".join(cmd), duration, output[1]))
+
+
+def _RunShfmtCheckCmd(input_api, output_api, bash_file):
+ """shfmt command wrapper."""
+ cmd = [
+ "shfmt", "-i", _BASH_INDENTATION, "-bn", "-ci", "-sr", "-kp", "-d",
+ bash_file
+ ]
+ name = "Check %s file." % bash_file
+ start = input_api.time.time()
+ output, rc = subprocess2.communicate(
+ cmd, stdout=None, stderr=subprocess2.PIPE, universal_newlines=True)
+ duration = input_api.time.time() - start
+ if rc == 0:
+ return output_api.PresubmitResult("%s\n%s (%4.2fs)\n" %
+ (name, " ".join(cmd), duration))
+ return output_api.PresubmitError("%s\n%s (%4.2fs) failed\n%s" %
+ (name, " ".join(cmd), duration, output[1]))
+
+
+def _RunCmdOnCheckedFiles(input_api, output_api, run_cmd, files_to_check):
+ """Ensure that libwebm/ files are clean."""
+ file_filter = lambda x: input_api.FilterSourceFile(
+ x, files_to_check=files_to_check, files_to_skip=None)
+
+ affected_files = input_api.change.AffectedFiles(file_filter=file_filter)
+ results = [
+ run_cmd(input_api, output_api, f.AbsoluteLocalPath())
+ for f in affected_files
+ ]
+ return results
+
+
+def _CommonChecks(input_api, output_api):
+ results = []
+ results.extend(
+ input_api.canned_checks.CheckChangeHasNoCrAndHasOnlyOneEol(
+ input_api, output_api))
+ results.extend(
+ input_api.canned_checks.CheckChangeHasNoTabs(input_api, output_api))
+ results.extend(
+ input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
+ input_api, output_api))
+ results.append(_CheckCommitSubjectLength(input_api, output_api))
+
+ source_file_filter = lambda x: input_api.FilterSourceFile(
+ x, files_to_skip=_GetFilesToSkip(input_api))
+ results.extend(
+ input_api.canned_checks.CheckLongLines(
+ input_api,
+ output_api,
+ maxlen=_LIBWEBM_MAX_LINE_LENGTH,
+ source_file_filter=source_file_filter))
+
+ results.extend(
+ input_api.canned_checks.CheckPatchFormatted(
+ input_api,
+ output_api,
+ check_clang_format=True,
+ check_python=True,
+ result_factory=output_api.PresubmitError))
+ results.extend(_CheckChangeLintsClean(input_api, output_api))
+
+ # Run pylint.
+ results.extend(
+ input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ files_to_skip=_GetFilesToSkip(input_api),
+ pylintrc=".pylintrc",
+ version="2.7"))
+
+ # Binaries shellcheck and shfmt are not installed in depot_tools.
+ # Installation is needed for these checks to run.
+ try:
+ subprocess2.communicate(["shellcheck", "--version"])
+ results.extend(
+ _RunCmdOnCheckedFiles(input_api, output_api, _RunShellCheckCmd,
+ _INCLUDE_BASH_FILES_ONLY))
+ print("shfmt")
+ subprocess2.communicate(["shfmt", "-version"])
+ results.extend(
+ _RunCmdOnCheckedFiles(input_api, output_api, _RunShfmtCheckCmd,
+ _INCLUDE_BASH_FILES_ONLY))
+ except OSError as os_error:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ "%s\nPlease install missing binaries locally." % os_error.args[0]))
+ return results
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ results = []
+ results.extend(_CommonChecks(input_api, output_api))
+ return results
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ results = []
+ results.extend(_CommonChecks(input_api, output_api))
+ return results
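As a rough sketch of exercising the new hooks locally (assuming a libwebm checkout managed with depot_tools; shellcheck and shfmt are optional extras, not part of this change):

  # Run the PRESUBMIT.py checks on a pending change with depot_tools on PATH.
  git cl presubmit
  # Without shellcheck/shfmt installed, the run ends with a prompt warning
  # (see the try/except around subprocess2 above) instead of failing.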
diff --git a/build/cxx_flags.cmake b/build/cxx_flags.cmake
index 9e96889..1aa6a86 100644
--- a/build/cxx_flags.cmake
+++ b/build/cxx_flags.cmake
@@ -5,7 +5,6 @@
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
-cmake_minimum_required(VERSION 3.2)
include(CheckCXXCompilerFlag)
diff --git a/build/msvc_runtime.cmake b/build/msvc_runtime.cmake
index d7d7add..4f53a36 100644
--- a/build/msvc_runtime.cmake
+++ b/build/msvc_runtime.cmake
@@ -5,7 +5,6 @@
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
-cmake_minimum_required(VERSION 2.8)
if (MSVC)
# CMake defaults to producing code linked to the DLL MSVC runtime. In libwebm
diff --git a/common/vp9_header_parser.cc b/common/vp9_header_parser.cc
index 604a050..c017d7f 100644
--- a/common/vp9_header_parser.cc
+++ b/common/vp9_header_parser.cc
@@ -169,13 +169,16 @@ void Vp9HeaderParser::ParseColorSpace() {
void Vp9HeaderParser::ParseFrameResolution() {
width_ = VpxReadLiteral(16) + 1;
height_ = VpxReadLiteral(16) + 1;
+ if (ReadBit()) {
+ display_width_ = VpxReadLiteral(16) + 1;
+ display_height_ = VpxReadLiteral(16) + 1;
+ } else {
+ display_width_ = width_;
+ display_height_ = height_;
+ }
}
void Vp9HeaderParser::ParseFrameParallelMode() {
- if (ReadBit()) {
- VpxReadLiteral(16); // display width
- VpxReadLiteral(16); // display height
- }
if (!error_resilient_mode_) {
ReadBit(); // Consume refresh frame context
frame_parallel_mode_ = ReadBit();
diff --git a/common/vp9_header_parser.h b/common/vp9_header_parser.h
index 06bd656..3e57514 100644
--- a/common/vp9_header_parser.h
+++ b/common/vp9_header_parser.h
@@ -64,6 +64,8 @@ class Vp9HeaderParser {
int color_space() const { return color_space_; }
int width() const { return width_; }
int height() const { return height_; }
+ int display_width() const { return display_width_; }
+ int display_height() const { return display_height_; }
int refresh_frame_flags() const { return refresh_frame_flags_; }
int row_tiles() const { return row_tiles_; }
int column_tiles() const { return column_tiles_; }
@@ -115,6 +117,8 @@ class Vp9HeaderParser {
int refresh_frame_flags_;
int width_;
int height_;
+ int display_width_;
+ int display_height_;
int row_tiles_;
int column_tiles_;
int frame_parallel_mode_;
diff --git a/common/vp9_level_stats.cc b/common/vp9_level_stats.cc
index 76891e6..96a4bcc 100644
--- a/common/vp9_level_stats.cc
+++ b/common/vp9_level_stats.cc
@@ -17,21 +17,21 @@
namespace vp9_parser {
const Vp9LevelRow Vp9LevelStats::Vp9LevelTable[kNumVp9Levels] = {
- {LEVEL_1, 829440, 36864, 200, 400, 2, 1, 4, 8, 512},
- {LEVEL_1_1, 2764800, 73728, 800, 1000, 2, 1, 4, 8, 768},
- {LEVEL_2, 4608000, 122880, 1800, 1500, 2, 1, 4, 8, 960},
- {LEVEL_2_1, 9216000, 245760, 3600, 2800, 2, 2, 4, 8, 1344},
- {LEVEL_3, 20736000, 552960, 7200, 6000, 2, 4, 4, 8, 2048},
- {LEVEL_3_1, 36864000, 983040, 12000, 10000, 2, 4, 4, 8, 2752},
- {LEVEL_4, 83558400, 2228224, 18000, 16000, 4, 4, 4, 8, 4160},
- {LEVEL_4_1, 160432128, 2228224, 30000, 18000, 4, 4, 5, 6, 4160},
- {LEVEL_5, 311951360, 8912896, 60000, 36000, 6, 8, 6, 4, 8384},
- {LEVEL_5_1, 588251136, 8912896, 120000, 46000, 8, 8, 10, 4, 8384},
+ {LEVEL_1, 829440, 36864, 512, 200, 400, 2, 1, 4, 8},
+ {LEVEL_1_1, 2764800, 73728, 768, 800, 1000, 2, 1, 4, 8},
+ {LEVEL_2, 4608000, 122880, 960, 1800, 1500, 2, 1, 4, 8},
+ {LEVEL_2_1, 9216000, 245760, 1344, 3600, 2800, 2, 2, 4, 8},
+ {LEVEL_3, 20736000, 552960, 2048, 7200, 6000, 2, 4, 4, 8},
+ {LEVEL_3_1, 36864000, 983040, 2752, 12000, 10000, 2, 4, 4, 8},
+ {LEVEL_4, 83558400, 2228224, 4160, 18000, 16000, 4, 4, 4, 8},
+ {LEVEL_4_1, 160432128, 2228224, 4160, 30000, 18000, 4, 4, 5, 6},
+ {LEVEL_5, 311951360, 8912896, 8384, 60000, 36000, 6, 8, 6, 4},
+ {LEVEL_5_1, 588251136, 8912896, 8384, 120000, 46000, 8, 8, 10, 4},
// CPB Size = 0 for levels 5_2 to 6_2
- {LEVEL_5_2, 1176502272, 8912896, 180000, 0, 8, 8, 10, 4, 8384},
- {LEVEL_6, 1176502272, 35651584, 180000, 0, 8, 16, 10, 4, 16832},
- {LEVEL_6_1, 2353004544, 35651584, 240000, 0, 8, 16, 10, 4, 16832},
- {LEVEL_6_2, 4706009088, 35651584, 480000, 0, 8, 16, 10, 4, 16832}};
+ {LEVEL_5_2, 1176502272, 8912896, 8384, 180000, 0, 8, 8, 10, 4},
+ {LEVEL_6, 1176502272, 35651584, 16832, 180000, 0, 8, 16, 10, 4},
+ {LEVEL_6_1, 2353004544, 35651584, 16832, 240000, 0, 8, 16, 10, 4},
+ {LEVEL_6_2, 4706009088, 35651584, 16832, 480000, 0, 8, 16, 10, 4}};
void Vp9LevelStats::AddFrame(const Vp9HeaderParser& parser, int64_t time_ns) {
++frames;
@@ -129,7 +129,7 @@ Vp9Level Vp9LevelStats::GetLevel() const {
const int64_t max_luma_picture_breadth = GetMaxLumaPictureBreadth();
const double average_bitrate = GetAverageBitRate();
const double max_cpb_size = GetMaxCpbSize();
- const double compresion_ratio = GetCompressionRatio();
+ const double compression_ratio = GetCompressionRatio();
const int max_column_tiles = GetMaxColumnTiles();
const int min_altref_distance = GetMinimumAltrefDistance();
const int max_ref_frames = GetMaxReferenceFrames();
@@ -215,7 +215,7 @@ Vp9Level Vp9LevelStats::GetLevel() const {
// The minimum compression ratio has the same behavior as minimum altref
// distance.
- if (compresion_ratio < Vp9LevelTable[level_index].compresion_ratio)
+ if (compression_ratio < Vp9LevelTable[level_index].compression_ratio)
max_level = LEVEL_UNKNOWN;
return max_level;
}
diff --git a/common/vp9_level_stats.h b/common/vp9_level_stats.h
index 45d6f5c..4230385 100644
--- a/common/vp9_level_stats.h
+++ b/common/vp9_level_stats.h
@@ -40,20 +40,13 @@ enum Vp9Level {
};
struct Vp9LevelRow {
- Vp9LevelRow() = default;
- ~Vp9LevelRow() = default;
- Vp9LevelRow(Vp9LevelRow&& other) = default;
- Vp9LevelRow(const Vp9LevelRow& other) = default;
- Vp9LevelRow& operator=(Vp9LevelRow&& other) = delete;
- Vp9LevelRow& operator=(const Vp9LevelRow& other) = delete;
-
Vp9Level level;
int64_t max_luma_sample_rate;
int64_t max_luma_picture_size;
int64_t max_luma_picture_breadth;
double average_bitrate;
double max_cpb_size;
- double compresion_ratio;
+ double compression_ratio;
int max_tiles;
int min_altref_distance;
int max_ref_frames;
diff --git a/common/vp9_level_stats_tests.cc b/common/vp9_level_stats_tests.cc
index 0dec071..a39cf37 100644
--- a/common/vp9_level_stats_tests.cc
+++ b/common/vp9_level_stats_tests.cc
@@ -118,7 +118,7 @@ class Vp9LevelStatsTests : public ::testing::Test {
};
TEST_F(Vp9LevelStatsTests, VideoOnlyFile) {
- CreateAndLoadSegment("test_stereo_left_right.webm");
+ ASSERT_NO_FATAL_FAILURE(CreateAndLoadSegment("test_stereo_left_right.webm"));
ProcessTheFrames();
EXPECT_EQ(256, parser_.width());
EXPECT_EQ(144, parser_.height());
@@ -141,7 +141,8 @@ TEST_F(Vp9LevelStatsTests, VideoOnlyFile) {
}
TEST_F(Vp9LevelStatsTests, Muxed) {
- CreateAndLoadSegment("bbb_480p_vp9_opus_1second.webm", 4);
+ ASSERT_NO_FATAL_FAILURE(
+ CreateAndLoadSegment("bbb_480p_vp9_opus_1second.webm", 4));
ProcessTheFrames();
EXPECT_EQ(854, parser_.width());
EXPECT_EQ(480, parser_.height());
@@ -163,7 +164,7 @@ TEST_F(Vp9LevelStatsTests, Muxed) {
}
TEST_F(Vp9LevelStatsTests, SetDuration) {
- CreateAndLoadSegment("test_stereo_left_right.webm");
+ ASSERT_NO_FATAL_FAILURE(CreateAndLoadSegment("test_stereo_left_right.webm"));
ProcessTheFrames();
const int64_t kDurationNano = 2080000000; // 2.08 seconds
stats_.set_duration(kDurationNano);
diff --git a/infra/README.md b/infra/README.md
new file mode 100644
index 0000000..ce75d2d
--- /dev/null
+++ b/infra/README.md
@@ -0,0 +1,23 @@
+# Infra Build Scripts
+
+This directory contains scripts to build libwebm in various configurations.
+These scripts were created to support Jenkins integration pipelines, but they
+can also be run locally.
+
+## Environment
+
+Most of these scripts were ported from Jenkins, so in order to be run locally
+some environment variables must be set prior to invocation.
+
+**WORKSPACE** Traditionally, the Jenkins `WORKSPACE` path. If not defined, a
+temporary directory will be used.
+
+## LUCI Integration
+
+[Builder Dashboard](https://ci.chromium.org/p/open-codecs) \
+The new builders run these scripts on each CL. The current configuration
+supports the `refs/heads/main` branch.
+
+## Scripts
+
+**compile.sh** Builds libwebm with supported configuration and host toolchains.
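A minimal sketch of the local workflow this README describes (the checkout path is an assumption, not part of the change; WORKSPACE is optional and falls back to a mktemp -d directory, per infra/compile.sh below):

  # Build a static release configuration with the host toolchain.
  export WORKSPACE="${HOME}/libwebm-ci"   # hypothetical location; optional
  ./infra/compile.sh static native        # BUILD_TYPE=static, TARGET=native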
diff --git a/infra/common.sh b/infra/common.sh
new file mode 100644
index 0000000..5d9c8e5
--- /dev/null
+++ b/infra/common.sh
@@ -0,0 +1,65 @@
+# Copyright (c) 2021, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in
+# the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the name of Google nor the names of its contributors may
+# be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+log_err() {
+ echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*" >&2
+}
+
+#######################################
+# Create build directory. Build directory will be deleted if it exists.
+# Outputs:
+# build dir path
+# Returns:
+# mkdir result
+#######################################
+make_build_dir() {
+ if [[ "$#" -ne 1 ]]; then
+ return 1
+ fi
+
+ local build_dir
+ build_dir="$1"
+ [[ -d "${build_dir}" ]] && rm -rf "${build_dir}"
+ mkdir -p "${build_dir}"
+}
+
+#######################################
+# Cleanup intermediate files from the build directory and the Makefile.unix build.
+# Globals:
+# BUILD_DIR build directory
+# LIBWEBM_ROOT repository's root path
+#######################################
+cleanup() {
+ # BUILD_DIR is not completely removed to allow for binary artifacts to be
+ # extracted.
+ find "${BUILD_DIR:?}" \( -name "*.[ao]" -o -name "*.l[ao]" \) -exec rm \
+ -f {} +
+ make -C "${LIBWEBM_ROOT:?}" -f Makefile.unix clean
+}
diff --git a/infra/compile.sh b/infra/compile.sh
new file mode 100755
index 0000000..b4f81f9
--- /dev/null
+++ b/infra/compile.sh
@@ -0,0 +1,128 @@
+#!/bin/bash
+# Copyright (c) 2021, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in
+# the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the name of Google nor the names of its contributors may
+# be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+set -e
+shopt -s inherit_errexit
+
+LIBWEBM_ROOT="$(realpath "$(dirname "$0")/..")"
+readonly LIBWEBM_ROOT
+readonly WORKSPACE=${WORKSPACE:-"$(mktemp -d -t webm.XXX)"}
+
+# shellcheck source=infra/common.sh
+source "${LIBWEBM_ROOT}/infra/common.sh"
+
+usage() {
+ cat << EOF
+Usage: compile.sh BUILD_TYPE TARGET
+Options:
+BUILD_TYPE supported build type (static, static-debug)
+TARGET supported target platform compilation: (native, native-clang,
+ clang-i686, i686-w64-mingw32, x86_64-w64-mingw32,
+ native-Makefile.unix)
+Environment variables:
+WORKSPACE directory where the build is done
+EOF
+}
+
+#######################################
+# Setup ccache for toolchain.
+#######################################
+setup_ccache() {
+ if command -v ccache 2> /dev/null; then
+ export CCACHE_CPP2=yes
+ export PATH="/usr/lib/ccache:${PATH}"
+ fi
+}
+
+################################################################################
+echo "Building libwebm in ${WORKSPACE}"
+
+if [[ ! -d "${WORKSPACE}" ]]; then
+ log_err "${WORKSPACE} directory does not exist"
+ exit 1
+fi
+
+BUILD_TYPE=${1:?"Build type not defined.$(
+ echo
+ usage
+)"}
+TARGET=${2:?"Target not defined.$(
+ echo
+ usage
+)"}
+BUILD_DIR="${WORKSPACE}/build-${BUILD_TYPE}"
+
+trap cleanup EXIT
+setup_ccache
+make_build_dir "${BUILD_DIR}"
+
+case "${TARGET}" in
+ native-Makefile.unix)
+ make -C "${LIBWEBM_ROOT}" -f Makefile.unix
+ ;;
+ *)
+ opts=()
+ case "${BUILD_TYPE}" in
+ static) opts+=("-DCMAKE_BUILD_TYPE=Release") ;;
+ *debug) opts+=("-DCMAKE_BUILD_TYPE=Debug") ;;
+ *)
+ log_err "${BUILD_TYPE} build type not supported"
+ usage
+ exit 1
+ ;;
+ esac
+
+ TOOLCHAIN_FILE_FLAG="-DCMAKE_TOOLCHAIN_FILE=${LIBWEBM_ROOT}/build"
+ case "${TARGET}" in
+ native-clang) opts+=("-DCMAKE_CXX_COMPILER=clang++") ;;
+ clang-i686)
+ opts+=("-DCMAKE_CXX_COMPILER=clang++")
+ opts+=("-DCMAKE_CXX_FLAGS=-m32")
+ ;;
+ native) ;; # No additional flags needed.
+ i686-w64-mingw32)
+ opts+=("${TOOLCHAIN_FILE_FLAG}/x86-mingw-gcc.cmake")
+ ;;
+ x86_64-w64-mingw32)
+ opts+=("${TOOLCHAIN_FILE_FLAG}/x86_64-mingw-gcc.cmake")
+ ;;
+ *)
+ log_err "${TARGET} TARGET not supported"
+ usage
+ exit 1
+ ;;
+ esac
+ pushd "${BUILD_DIR}"
+ cmake "${LIBWEBM_ROOT}" "${opts[@]}"
+ make -j4 VERBOSE=1
+ popd
+ ;;
+esac
diff --git a/infra/compile_android.sh b/infra/compile_android.sh
new file mode 100755
index 0000000..df0d39c
--- /dev/null
+++ b/infra/compile_android.sh
@@ -0,0 +1,80 @@
+#!/bin/bash
+# Copyright (c) 2021, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in
+# the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the name of Google nor the names of its contributors may
+# be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+set -e
+shopt -s inherit_errexit
+
+LIBWEBM_ROOT="$(realpath "$(dirname "$0")/..")"
+readonly LIBWEBM_ROOT
+readonly WORKSPACE=${WORKSPACE:-"$(mktemp -d)"}
+
+# shellcheck source=infra/common.sh
+source "${LIBWEBM_ROOT}/infra/common.sh"
+
+usage() {
+ cat << EOF
+Usage: $(basename "$0") APP_OPTIM APP_ABI
+Options:
+APP_OPTIM supported build type (release, debug)
+APP_ABI supported application binary interface compilation: (armeabi-v7a,
+ arm64-v8a, x86, x86_64)
+Environment variables:
+WORKSPACE directory where the build is done
+EOF
+}
+
+################################################################################
+echo "Building libwebm for Android in ${WORKSPACE}"
+
+if [[ ! -d "${WORKSPACE}" ]]; then
+ log_err "${WORKSPACE} directory does not exist"
+ exit 1
+fi
+
+APP_OPTIM=${1:?"not defined.$(
+ echo
+ usage
+)"}
+APP_ABI=${2:?"Application Binary Interface not defined.$(
+ echo
+ usage
+)"}
+BUILD_DIR="${WORKSPACE}/build-${APP_OPTIM}"
+
+if ! command -v ndk-build 2> /dev/null; then
+ log_err "unable to find ndk-build in PATH"
+ exit 1
+fi
+
+make_build_dir "${BUILD_DIR}"
+ndk-build -j2 NDK_PROJECT_PATH="${BUILD_DIR}" APP_OPTIM="${APP_OPTIM}" \
+ APP_ABI="${APP_ABI}" APP_BUILD_SCRIPT="${LIBWEBM_ROOT}/Android.mk" \
+ APP_STL="c++_static"
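A minimal sketch of driving the Android build locally; the script only requires that ndk-build resolve on PATH, and the NDK path below is purely an example:

  export PATH="${HOME}/android-ndk-r25c:${PATH}"   # hypothetical NDK location
  ./infra/compile_android.sh release arm64-v8a     # APP_OPTIM=release, APP_ABI=arm64-v8a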
diff --git a/infra/run_unit_tests.sh b/infra/run_unit_tests.sh
new file mode 100755
index 0000000..abcae55
--- /dev/null
+++ b/infra/run_unit_tests.sh
@@ -0,0 +1,200 @@
+#!/bin/bash
+# Copyright (c) 2021, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in
+# the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the name of Google nor the names of its contributors may
+# be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+set -xeo pipefail
+shopt -s inherit_errexit
+
+readonly GOOGLETEST_REPO="https://github.com/google/googletest.git"
+LIBWEBM_ROOT="$(realpath "$(dirname "$0")/..")"
+readonly LIBWEBM_ROOT
+readonly WORKSPACE=${WORKSPACE:-"$(mktemp -d -t webm.XXX)"}
+
+# shellcheck source=infra/common.sh
+source "${LIBWEBM_ROOT}/infra/common.sh"
+
+usage() {
+ cat << EOF
+Usage: $(basename "$0") TARGET
+Options:
+TARGET supported targets: (x86-asan, x86-ubsan, x86_64-asan, x86_64-ubsan,
+ x86_64-valgrind)
+Global variables:
+WORKSPACE directory where the build is done
+EOF
+}
+
+#######################################
+# Run valgrind
+#######################################
+run_valgrind() {
+ valgrind \
+ --leak-check=full \
+ --show-reachable=yes \
+ --track-origins=yes \
+ --error-exitcode=1 \
+ "$@"
+}
+
+#######################################
+# Ensure GoogleTest repository is setup
+#
+# Globals:
+# GOOGLETEST_REPO googletest repository url
+# WORKSPACE directory where the build is done
+#######################################
+ensure_googletest() {
+ local googletest_dir
+ googletest_dir="${WORKSPACE}/googletest"
+
+ if [[ ! -d "${googletest_dir}" ]] || ! git -C "${googletest_dir}" pull; then
+ rm -rf "${googletest_dir}"
+ git clone --depth 1 "${GOOGLETEST_REPO}" "${googletest_dir}"
+ fi
+
+ opts+=("-DGTEST_SRC_DIR=${googletest_dir}")
+}
+
+#######################################
+# Symbolizes and dumps warnings in the address sanitizer log
+# Globals:
+# BUILD_DIR directory where the build is done
+# SANITIZER_LOG path to sanitizer log file
+#######################################
+dump_sanitizer_log() {
+ if [[ "$#" -ne 1 ]]; then
+ return 1
+ fi
+
+ if command -v asan_symbolize; then
+ asan_symbolize_tool="asan_symbolize"
+ else
+ asan_symbolize_tool="asan_symbolize.py"
+ fi
+
+ local target
+ target="$1"
+ case "${target}" in
+ *-asan)
+ asanlog_symbolized="${BUILD_DIR}/asan_log.asanlog_symbolized"
+ grep -v 'Invalid VP9' "${SANITIZER_LOG}" > "${SANITIZER_LOG}.2" || true
+ "${asan_symbolize_tool}" "${BUILD_DIR}" < "${SANITIZER_LOG}.2" \
+ | c++filt > "${asanlog_symbolized}"
+ if [[ -s "${asanlog_symbolized}" ]]; then
+ cat "${asanlog_symbolized}"
+ return 1
+ fi
+ ;;
+ *) ;; # No other sanitizer options are required
+ # TODO(b/185520494): Handle ubsan warning output inspection
+ esac
+}
+
+################################################################################
+echo "Unit testing libwebm in ${WORKSPACE}"
+
+if [[ ! -d "${WORKSPACE}" ]]; then
+ log_err "${WORKSPACE} directory does not exist"
+ exit 1
+fi
+
+TARGET=${1:? "$(
+ echo
+ usage
+)"}
+readonly BUILD_DIR="${WORKSPACE}/tests-${TARGET}"
+readonly SANITIZER_LOG="${BUILD_DIR}/sanitizer_log"
+
+# Create a fresh build directory.
+trap 'dump_sanitizer_log ${TARGET}; cleanup' EXIT
+make_build_dir "${BUILD_DIR}"
+
+case "${TARGET}" in
+ x86-*) CXX='clang++ -m32' ;;
+ x86_64-*) CXX=clang++ ;;
+ *)
+ log_err "${TARGET} should have x86 or x86_64 prefix."
+ usage
+ exit 1
+ ;;
+esac
+# cmake (3.4.3) will only accept the -m32 variants when used via the CXX env
+# var.
+export CXX
+opts+=("-DCMAKE_BUILD_TYPE=Debug" "-DENABLE_TESTS=ON")
+case "${TARGET}" in
+ *-asan) opts+=("-DCMAKE_CXX_FLAGS=-fsanitize=address") ;;
+ *-ubsan)
+ opts+=("-DCMAKE_CXX_FLAGS=-fsanitize=integer")
+ if [[ "${TARGET}" == "x86-ubsan" ]]; then
+ # clang fails to find symbols when -fsanitize=integer is set in x86 arch.
+ # https://bugs.llvm.org/show_bug.cgi?id=17693
+ opts+=("-DCMAKE_CXX_FLAGS=-rtlib=compiler-rt")
+ opts+=("-DCMAKE_EXE_LINKER_FLAGS=-lgcc_s")
+ fi
+ ;;
+ *) ;; # No additional flags needed.
+esac
+
+ensure_googletest
+# Using pushd instead of -S/-B for backward compatibility with CMake < 3.13.x
+pushd "${BUILD_DIR}"
+cmake "${LIBWEBM_ROOT}" "${opts[@]}"
+make -j 4
+popd
+
+find_tests="$(find "${BUILD_DIR}" -name '*_tests')"
+UNIT_TESTS=()
+while IFS='' read -r line; do
+ UNIT_TESTS+=("${line}")
+done < <(echo "${find_tests}")
+
+export LIBWEBM_TEST_DATA_PATH="${LIBWEBM_ROOT}/testing/testdata"
+case "${TARGET}" in
+ *-asan | *-ubsan)
+ rm -f "${SANITIZER_LOG}"
+ for test in "${UNIT_TESTS[@]}"; do
+ "${test}" \
+ --gtest_output="xml:${BUILD_DIR}/$(basename "${test}")_detail.xml" \
+ 3<&1 1>&2 2>&3 | tee -a "${SANITIZER_LOG}"
+ done
+ ;;
+ *-valgrind)
+ for test in "${UNIT_TESTS[@]}"; do
+ run_valgrind --error-exitcode=1 "${test}" \
+ --gtest_output="xml:${BUILD_DIR}/$(basename "${test}")_detail.xml"
+ done
+ ;;
+ *)
+ log_err "Unrecognized TARGET:${TARGET}."
+ usage
+ exit 1
+ ;;
+esac
diff --git a/m2ts/webm2pes.cc b/m2ts/webm2pes.cc
index fc4b314..afa8a6b 100644
--- a/m2ts/webm2pes.cc
+++ b/m2ts/webm2pes.cc
@@ -68,7 +68,7 @@ void PesOptionalHeader::SetPtsBits(std::int64_t pts_90khz) {
// Top 8 bits of second PTS chunk.
buffer[3] |= (pts3 >> 7) & 0xff;
// bottom 7 bits of second PTS chunk.
- buffer[4] |= (pts3 << 1);
+ buffer[4] |= (pts3 << 1) & 0xff;
// Marker.
buffer[4] |= 1;
diff --git a/mkvmuxer/mkvmuxer.cc b/mkvmuxer/mkvmuxer.cc
index 499fbe6..faaf016 100644
--- a/mkvmuxer/mkvmuxer.cc
+++ b/mkvmuxer/mkvmuxer.cc
@@ -607,10 +607,10 @@ bool ContentEncoding::Write(IMkvWriter* writer) const {
return true;
}
-uint64_t ContentEncoding::EncodingSize(uint64_t compresion_size,
+uint64_t ContentEncoding::EncodingSize(uint64_t compression_size,
uint64_t encryption_size) const {
// TODO(fgalligan): Add support for compression settings.
- if (compresion_size != 0)
+ if (compression_size != 0)
return 0;
uint64_t encoding_size = 0;
@@ -774,7 +774,7 @@ bool Track::Write(IMkvWriter* writer) const {
return false;
// AV1 tracks require a CodecPrivate. See
- // https://github.com/Matroska-Org/matroska-specification/blob/av1-mappin/codec/av1.md
+ // https://github.com/ietf-wg-cellar/matroska-specification/blob/HEAD/codec/av1.md
// TODO(tomfinegan): Update the above link to the AV1 Matroska mappings to
// point to a stable version once it is finalized, or our own WebM mappings
// page on webmproject.org should we decide to release them.
@@ -2622,8 +2622,7 @@ bool Cluster::Finalize(bool set_last_frame_duration, uint64_t duration) {
uint64_t Cluster::Size() const {
const uint64_t element_size =
- EbmlMasterElementSize(static_cast<uint64_t>(libwebm::kMkvCluster),
- uint64_t{0xFFFFFFFFFFFFFFFFU}) +
+ EbmlMasterElementSize(libwebm::kMkvCluster, 0xFFFFFFFFFFFFFFFFULL) +
payload_size_;
return element_size;
}
@@ -3085,6 +3084,7 @@ Segment::Segment()
accurate_cluster_duration_(false),
fixed_size_cluster_timecode_(false),
estimate_file_duration_(false),
+ ebml_header_size_(0),
payload_pos_(0),
size_position_(0),
doc_type_version_(kDefaultDocTypeVersion),
diff --git a/mkvmuxer/mkvmuxer.h b/mkvmuxer/mkvmuxer.h
index f2db377..8602d82 100644
--- a/mkvmuxer/mkvmuxer.h
+++ b/mkvmuxer/mkvmuxer.h
@@ -330,7 +330,7 @@ class ContentEncoding {
private:
// Returns the size in bytes for the encoding elements.
- uint64_t EncodingSize(uint64_t compresion_size,
+ uint64_t EncodingSize(uint64_t compression_size,
uint64_t encryption_size) const;
// Returns the size in bytes for the encryption elements.
@@ -1425,7 +1425,7 @@ class SeekHead {
bool Write(IMkvWriter* writer);
// We are going to put a cap on the number of Seek Entries.
- const static int32_t kSeekEntryCount = 5;
+ constexpr static int32_t kSeekEntryCount = 5;
private:
// Returns the maximum size in bytes of one seek entry.
@@ -1505,8 +1505,8 @@ class Segment {
kBeforeClusters = 0x1 // Position Cues before Clusters
};
- static const uint32_t kDefaultDocTypeVersion = 4;
- static const uint64_t kDefaultMaxClusterDuration = 30000000000ULL;
+ static constexpr uint32_t kDefaultDocTypeVersion = 4;
+ static constexpr uint64_t kDefaultMaxClusterDuration = 30000000000ULL;
Segment();
~Segment();
diff --git a/mkvmuxer/mkvmuxerutil.cc b/mkvmuxer/mkvmuxerutil.cc
index 6436817..300b155 100644
--- a/mkvmuxer/mkvmuxerutil.cc
+++ b/mkvmuxer/mkvmuxerutil.cc
@@ -606,7 +606,7 @@ uint64 WriteVoidElement(IMkvWriter* writer, uint64 size) {
void GetVersion(int32* major, int32* minor, int32* build, int32* revision) {
*major = 0;
- *minor = 2;
+ *minor = 3;
*build = 1;
*revision = 0;
}
diff --git a/mkvmuxer_sample.cc b/mkvmuxer_sample.cc
index 9ef5569..0b84d72 100644
--- a/mkvmuxer_sample.cc
+++ b/mkvmuxer_sample.cc
@@ -66,7 +66,7 @@ void Usage() {
printf(" 1: Equirectangular\n");
printf(" 2: Cube map\n");
printf(" 3: Mesh\n");
- printf(" -projection_file <string> Override projection private data");
+ printf(" -projection_file <string> Override projection private data\n");
printf(" with contents of this file\n");
printf(" -projection_pose_yaw <float> Projection pose yaw\n");
printf(" -projection_pose_pitch <float> Projection pose pitch\n");
diff --git a/mkvparser/mkvparser.cc b/mkvparser/mkvparser.cc
index 412e6a5..868afcb 100644
--- a/mkvparser/mkvparser.cc
+++ b/mkvparser/mkvparser.cc
@@ -54,9 +54,9 @@ Type* SafeArrayAlloc(unsigned long long num_elements,
void GetVersion(int& major, int& minor, int& build, int& revision) {
major = 1;
- minor = 0;
- build = 0;
- revision = 30;
+ minor = 1;
+ build = 1;
+ revision = 0;
}
long long ReadUInt(IMkvReader* pReader, long long pos, long& len) {
@@ -298,7 +298,7 @@ long UnserializeInt(IMkvReader* pReader, long long pos, long long size,
if (status < 0)
return status;
- unsigned long long result = first_byte;
+ unsigned long long result = static_cast<unsigned long long>(first_byte);
++pos;
for (long i = 1; i < size; ++i) {
@@ -1502,8 +1502,8 @@ long SeekHead::Parse() {
// first count the seek head entries
- int entry_count = 0;
- int void_element_count = 0;
+ long long entry_count = 0;
+ long long void_element_count = 0;
while (pos < stop) {
long long id, size;
@@ -1513,10 +1513,15 @@ long SeekHead::Parse() {
if (status < 0) // error
return status;
- if (id == libwebm::kMkvSeek)
+ if (id == libwebm::kMkvSeek) {
++entry_count;
- else if (id == libwebm::kMkvVoid)
+ if (entry_count > INT_MAX)
+ return E_PARSE_FAILED;
+ } else if (id == libwebm::kMkvVoid) {
++void_element_count;
+ if (void_element_count > INT_MAX)
+ return E_PARSE_FAILED;
+ }
pos += size; // consume payload
@@ -1528,14 +1533,15 @@ long SeekHead::Parse() {
return E_FILE_FORMAT_INVALID;
if (entry_count > 0) {
- m_entries = new (std::nothrow) Entry[entry_count];
+ m_entries = new (std::nothrow) Entry[static_cast<size_t>(entry_count)];
if (m_entries == NULL)
return -1;
}
if (void_element_count > 0) {
- m_void_elements = new (std::nothrow) VoidElement[void_element_count];
+ m_void_elements =
+ new (std::nothrow) VoidElement[static_cast<size_t>(void_element_count)];
if (m_void_elements == NULL)
return -1;
@@ -1582,13 +1588,13 @@ long SeekHead::Parse() {
ptrdiff_t count_ = ptrdiff_t(pEntry - m_entries);
assert(count_ >= 0);
- assert(count_ <= entry_count);
+ assert(static_cast<long long>(count_) <= entry_count);
m_entry_count = static_cast<int>(count_);
count_ = ptrdiff_t(pVoidElement - m_void_elements);
assert(count_ >= 0);
- assert(count_ <= void_element_count);
+ assert(static_cast<long long>(count_) <= void_element_count);
m_void_element_count = static_cast<int>(count_);
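
These SeekHead::Parse hunks are the first instance of a hardening pattern this patch applies throughout mkvparser.cc; the CuePoint::Load, ContentEncoding::ParseContentEncodingEntry, Track::ParseContentEncodingsEntry, and Tracks::Parse hunks below repeat it. Element counts are accumulated in a 64-bit type, parsing is rejected with E_PARSE_FAILED as soon as a count would overflow the narrower field it is later stored in, and the array allocation casts the validated count to size_t. A condensed sketch of the shape of that pattern (the entry type and status values are placeholders, not libwebm's):

// Condensed illustration of the count-then-allocate hardening above; the
// entry type and status values are placeholders.
#include <climits>
#include <cstddef>
#include <new>

struct Entry {};

// Returns nonzero on failure, mirroring the parser's negative status codes.
long AllocateEntries(long long count, Entry** out_entries) {
  *out_entries = nullptr;
  if (count > INT_MAX)  // reject before any narrowing conversion can overflow
    return -1;          // stand-in for a parse-failure status
  if (count <= 0)
    return 0;           // nothing to allocate
  *out_entries = new (std::nothrow) Entry[static_cast<size_t>(count)];
  return (*out_entries == nullptr) ? -1 : 0;
}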
@@ -2299,7 +2305,7 @@ bool CuePoint::Load(IMkvReader* pReader) {
long long pos = pos_;
// First count number of track positions
-
+ unsigned long long track_positions_count = 0;
while (pos < stop) {
long len;
@@ -2323,12 +2329,17 @@ bool CuePoint::Load(IMkvReader* pReader) {
if (id == libwebm::kMkvCueTime)
m_timecode = UnserializeUInt(pReader, pos, size);
- else if (id == libwebm::kMkvCueTrackPositions)
- ++m_track_positions_count;
+ else if (id == libwebm::kMkvCueTrackPositions) {
+ ++track_positions_count;
+ if (track_positions_count > UINT_MAX)
+ return E_PARSE_FAILED;
+ }
pos += size; // consume payload
}
+ m_track_positions_count = static_cast<size_t>(track_positions_count);
+
if (m_timecode < 0 || m_track_positions_count <= 0) {
return false;
}
@@ -2421,7 +2432,7 @@ bool CuePoint::TrackPosition::Parse(IMkvReader* pReader, long long start_,
pos += size; // consume payload
}
- if ((m_pos < 0) || (m_track <= 0)) {
+ if ((m_pos < 0) || (m_track <= 0) || (m_block < 0) || (m_block > LONG_MAX)) {
return false;
}
@@ -4194,8 +4205,8 @@ long ContentEncoding::ParseContentEncodingEntry(long long start, long long size,
const long long stop = start + size;
// Count ContentCompression and ContentEncryption elements.
- int compression_count = 0;
- int encryption_count = 0;
+ long long compression_count = 0;
+ long long encryption_count = 0;
while (pos < stop) {
long long id, size;
@@ -4203,11 +4214,17 @@ long ContentEncoding::ParseContentEncodingEntry(long long start, long long size,
if (status < 0) // error
return status;
- if (id == libwebm::kMkvContentCompression)
+ if (id == libwebm::kMkvContentCompression) {
++compression_count;
+ if (compression_count > INT_MAX)
+ return E_PARSE_FAILED;
+ }
- if (id == libwebm::kMkvContentEncryption)
+ if (id == libwebm::kMkvContentEncryption) {
++encryption_count;
+ if (encryption_count > INT_MAX)
+ return E_PARSE_FAILED;
+ }
pos += size; // consume payload
if (pos > stop)
@@ -4218,16 +4235,16 @@ long ContentEncoding::ParseContentEncodingEntry(long long start, long long size,
return -1;
if (compression_count > 0) {
- compression_entries_ =
- new (std::nothrow) ContentCompression*[compression_count];
+ compression_entries_ = new (std::nothrow)
+ ContentCompression*[static_cast<size_t>(compression_count)];
if (!compression_entries_)
return -1;
compression_entries_end_ = compression_entries_;
}
if (encryption_count > 0) {
- encryption_entries_ =
- new (std::nothrow) ContentEncryption*[encryption_count];
+ encryption_entries_ = new (std::nothrow)
+ ContentEncryption*[static_cast<size_t>(encryption_count)];
if (!encryption_entries_) {
delete[] compression_entries_;
compression_entries_ = NULL;
@@ -4918,7 +4935,7 @@ long Track::ParseContentEncodingsEntry(long long start, long long size) {
const long long stop = start + size;
// Count ContentEncoding elements.
- int count = 0;
+ long long count = 0;
while (pos < stop) {
long long id, size;
const long status = ParseElementHeader(pReader, pos, stop, id, size);
@@ -4926,8 +4943,11 @@ long Track::ParseContentEncodingsEntry(long long start, long long size) {
return status;
// pos now designates start of element
- if (id == libwebm::kMkvContentEncoding)
+ if (id == libwebm::kMkvContentEncoding) {
++count;
+ if (count > INT_MAX)
+ return E_PARSE_FAILED;
+ }
pos += size; // consume payload
if (pos > stop)
@@ -4937,7 +4957,8 @@ long Track::ParseContentEncodingsEntry(long long start, long long size) {
if (count <= 0)
return -1;
- content_encoding_entries_ = new (std::nothrow) ContentEncoding*[count];
+ content_encoding_entries_ =
+ new (std::nothrow) ContentEncoding*[static_cast<size_t>(count)];
if (!content_encoding_entries_)
return -1;
@@ -5653,7 +5674,7 @@ long Tracks::Parse() {
const long long stop = m_start + m_size;
IMkvReader* const pReader = m_pSegment->m_pReader;
- int count = 0;
+ long long count = 0;
long long pos = m_start;
while (pos < stop) {
@@ -5667,8 +5688,11 @@ long Tracks::Parse() {
if (size == 0) // weird
continue;
- if (id == libwebm::kMkvTrackEntry)
+ if (id == libwebm::kMkvTrackEntry) {
++count;
+ if (count > INT_MAX)
+ return E_PARSE_FAILED;
+ }
pos += size; // consume payload
if (pos > stop)
@@ -5681,7 +5705,7 @@ long Tracks::Parse() {
if (count <= 0)
return 0; // success
- m_trackEntries = new (std::nothrow) Track*[count];
+ m_trackEntries = new (std::nothrow) Track*[static_cast<size_t>(count)];
if (m_trackEntries == NULL)
return -1;
diff --git a/testing/mkvparser_fuzzer.cc b/testing/mkvparser_fuzzer.cc
new file mode 100644
index 0000000..b3bb799
--- /dev/null
+++ b/testing/mkvparser_fuzzer.cc
@@ -0,0 +1,160 @@
+// Copyright (c) 2022 The WebM project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+#include <cstddef>
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <functional>
+#include <memory>
+#include <new>
+
+#include "mkvparser/mkvparser.h"
+#include "mkvparser/mkvreader.h"
+
+namespace {
+
+class MemoryReader : public mkvparser::IMkvReader {
+ public:
+ MemoryReader(const uint8_t* data, size_t size) : data_(data), size_(size) {}
+
+ int Read(long long pos, long len, unsigned char* buf) override {
+ if (pos < 0 || len < 0) {
+ abort();
+ }
+ if (pos >= size_ || size_ - pos < len) {
+ return -1;
+ }
+ memcpy(buf, data_ + pos, len);
+ return 0;
+ }
+
+ int Length(long long* total, long long* available) override {
+ if (total != nullptr) {
+ *total = size_;
+ }
+ if (available != nullptr) {
+ *available = size_;
+ }
+ return 0;
+ }
+
+ private:
+ const uint8_t* data_;
+ size_t size_;
+};
+
+void ParseCues(const mkvparser::Segment& segment) {
+ const mkvparser::Cues* const cues = segment.GetCues();
+ if (cues == nullptr) {
+ return;
+ }
+
+ while (!cues->DoneParsing()) {
+ cues->LoadCuePoint();
+ }
+}
+
+const mkvparser::BlockEntry* GetBlockEntryFromCues(
+ const void* ctx, const mkvparser::CuePoint* cue,
+ const mkvparser::CuePoint::TrackPosition* track_pos) {
+ const auto* const cues = static_cast<const mkvparser::Cues*>(ctx);
+ return cues->GetBlock(cue, track_pos);
+}
+
+const mkvparser::BlockEntry* GetBlockEntryFromCluster(
+ const void* ctx, const mkvparser::CuePoint* cue,
+ const mkvparser::CuePoint::TrackPosition* track_pos) {
+ if (track_pos == nullptr) {
+ return nullptr;
+ }
+ const auto* const cluster = static_cast<const mkvparser::Cluster*>(ctx);
+ const mkvparser::BlockEntry* block_entry =
+ cluster->GetEntry(*cue, *track_pos);
+ return block_entry;
+}
+
+void WalkCues(const mkvparser::Segment& segment,
+ std::function<const mkvparser::BlockEntry*(
+ const void*, const mkvparser::CuePoint*,
+ const mkvparser::CuePoint::TrackPosition*)>
+ get_block_entry,
+ const void* ctx) {
+ const mkvparser::Cues* const cues = segment.GetCues();
+ const mkvparser::Tracks* tracks = segment.GetTracks();
+ if (cues == nullptr || tracks == nullptr) {
+ return;
+ }
+ const unsigned long num_tracks = tracks->GetTracksCount();
+
+ for (const mkvparser::CuePoint* cue = cues->GetFirst(); cue != nullptr;
+ cue = cues->GetNext(cue)) {
+ for (unsigned long track_num = 0; track_num < num_tracks; ++track_num) {
+ const mkvparser::Track* const track = tracks->GetTrackByIndex(track_num);
+ const mkvparser::CuePoint::TrackPosition* const track_pos =
+ cue->Find(track);
+ const mkvparser::BlockEntry* block_entry =
+ get_block_entry(ctx, cue, track_pos);
+ static_cast<void>(block_entry);
+ }
+ }
+}
+
+void ParseCluster(const mkvparser::Cluster& cluster) {
+ const mkvparser::BlockEntry* block_entry;
+ long status = cluster.GetFirst(block_entry);
+ if (status != 0) {
+ return;
+ }
+
+ while (block_entry != nullptr && !block_entry->EOS()) {
+ const mkvparser::Block* const block = block_entry->GetBlock();
+ if (block == nullptr) {
+ return;
+ }
+
+ status = cluster.GetNext(block_entry, block_entry);
+ if (status != 0) {
+ return;
+ }
+ }
+}
+
+} // namespace
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ MemoryReader reader(data, size);
+
+ long long int pos = 0;
+ std::unique_ptr<mkvparser::EBMLHeader> ebml_header(
+ new (std::nothrow) mkvparser::EBMLHeader()); // NOLINT
+ if (ebml_header->Parse(&reader, pos) < 0) {
+ return 0;
+ }
+
+ mkvparser::Segment* temp_segment;
+ if (mkvparser::Segment::CreateInstance(&reader, pos, temp_segment) != 0) {
+ return 0;
+ }
+ std::unique_ptr<mkvparser::Segment> segment(temp_segment);
+
+ if (segment->Load() < 0) {
+ return 0;
+ }
+
+ ParseCues(*segment);
+ WalkCues(*segment, GetBlockEntryFromCues, segment->GetCues());
+
+ const mkvparser::Cluster* cluster = segment->GetFirst();
+ while (cluster != nullptr && !cluster->EOS()) {
+ ParseCluster(*cluster);
+ WalkCues(*segment, GetBlockEntryFromCluster, cluster);
+ cluster = segment->GetNext(cluster);
+ }
+
+ return 0;
+}
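
The new harness targets the standard libFuzzer entry point, which also makes it easy to replay a single input outside the fuzzing engine. Below is a stand-alone reproducer sketch (not part of this patch) that feeds one file to LLVMFuzzerTestOneInput; link it with testing/mkvparser_fuzzer.cc and the parser sources when building without -fsanitize=fuzzer.

// Sketch: minimal driver for the harness above, for replaying one input.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size);

int main(int argc, char** argv) {
  if (argc != 2) {
    std::fprintf(stderr, "usage: %s <input.webm>\n", argv[0]);
    return 1;
  }
  std::FILE* file = std::fopen(argv[1], "rb");
  if (file == nullptr)
    return 1;
  std::fseek(file, 0, SEEK_END);
  const long file_size = std::ftell(file);
  std::fseek(file, 0, SEEK_SET);
  std::vector<uint8_t> data(file_size > 0 ? static_cast<size_t>(file_size) : 0);
  const bool read_ok =
      data.empty() || std::fread(data.data(), 1, data.size(), file) == data.size();
  std::fclose(file);
  if (!read_ok)
    return 1;
  return LLVMFuzzerTestOneInput(data.data(), data.size());
}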
diff --git a/webm_info.cc b/webm_info.cc
index fdf4759..7cc7272 100644
--- a/webm_info.cc
+++ b/webm_info.cc
@@ -761,8 +761,15 @@ void PrintVP9Info(const uint8_t* data, int size, FILE* o, int64_t time_ns,
version, altref_frame, error_resilient_mode, row_tiles,
column_tiles, frame_parallel_mode);
- if (key && size > 4) {
- fprintf(o, " cs:%d", parser->color_space());
+ if (key) {
+ if (size > 4) {
+ fprintf(o, " cs:%d", parser->color_space());
+ }
+ if (parser->display_width() != parser->width() ||
+ parser->display_height() != parser->height()) {
+ fprintf(o, " dw:%d dh:%d", parser->display_width(),
+ parser->display_height());
+ }
}
if (count > 0) {
diff --git a/webm_parser/include/webm/file_reader.h b/webm_parser/include/webm/file_reader.h
index 6ccdc4d..69bec76 100644
--- a/webm_parser/include/webm/file_reader.h
+++ b/webm_parser/include/webm/file_reader.h
@@ -67,6 +67,19 @@ class FileReader : public Reader {
Status Skip(std::uint64_t num_to_skip,
std::uint64_t* num_actually_skipped) override;
+ /**
+ Moves the reader to a new absolute byte position in the file.
+
+ It is required to call DidSeek() on the parser after successfully seeking.
+ Seeking will only work on actual files, not stdin or pipes.
+
+ \param seek_position The new absolute byte position in the file.
+ \return `Status::kOkCompleted` if reader position is now `seek_position`.
+ `Status::kSeekFailed` if the reader was unable to seek to `seek_position`
+ such as when the file is stdin.
+ */
+ Status Seek(std::uint64_t seek_position);
+
std::uint64_t Position() const override;
private:
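
As the new doc comment states, a successful Seek() must be followed by notifying the parser that the read position changed. A minimal sketch of that sequence against the webm_parser API; the wrapper function is illustrative, and the WebmParser/Status usage is an assumption based on the library's public headers rather than anything in this patch.

// Sketch: reposition a FileReader, reset the parser, and resume parsing.
#include <cstdint>

#include "webm/callback.h"
#include "webm/file_reader.h"
#include "webm/status.h"
#include "webm/webm_parser.h"

webm::Status SeekAndResume(webm::FileReader* reader, webm::WebmParser* parser,
                           webm::Callback* callback, std::uint64_t position) {
  const webm::Status seek_status = reader->Seek(position);
  if (seek_status.code == webm::Status::kSeekFailed)
    return seek_status;  // e.g. the underlying FILE* is stdin or a pipe
  parser->DidSeek();                      // tell the parser the position moved
  return parser->Feed(callback, reader);  // continue parsing from `position`
}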
diff --git a/webm_parser/include/webm/status.h b/webm_parser/include/webm/status.h
index ff3f0c6..9577ef4 100644
--- a/webm_parser/include/webm/status.h
+++ b/webm_parser/include/webm/status.h
@@ -56,6 +56,11 @@ struct Status {
*/
kEndOfFile = -3,
+ /**
+ The reader was unable to seek to the requested location.
+ */
+ kSeekFailed = -4,
+
// Parsing errors. Range: -1025 to -2048.
/**
An element's ID is malformed.
diff --git a/webm_parser/src/file_reader.cc b/webm_parser/src/file_reader.cc
index 0921abe..a2a397b 100644
--- a/webm_parser/src/file_reader.cc
+++ b/webm_parser/src/file_reader.cc
@@ -16,6 +16,19 @@
#include "webm/status.h"
+#ifdef _MSC_VER
+#define FSEEK_(stream, offset, whence) _fseeki64(stream, offset, whence)
+#elif defined(_WIN32)
+#define FSEEK_(stream, offset, whence) \
+ fseeko64(stream, static_cast<off_t>(offset), whence)
+#elif _POSIX_C_SOURCE >= 200112L
+#define FSEEK_(stream, offset, whence) \
+ fseeko(stream, static_cast<off_t>(offset), whence)
+#else
+#define FSEEK_(stream, offset, whence) \
+ std::fseek(stream, static_cast<long>(offset), whence)
+#endif
+
namespace webm {
FileReader::FileReader(FILE* file) : file_(file) { assert(file); }
@@ -77,8 +90,7 @@ Status FileReader::Skip(std::uint64_t num_to_skip,
if (num_to_skip < static_cast<unsigned long>(seek_offset)) { // NOLINT
seek_offset = static_cast<long>(num_to_skip); // NOLINT
}
- // TODO(mjbshaw): Use fseeko64/_fseeki64 if available.
- if (!std::fseek(file_.get(), seek_offset, SEEK_CUR)) {
+ if (!FSEEK_(file_.get(), seek_offset, SEEK_CUR)) {
*num_actually_skipped = static_cast<std::uint64_t>(seek_offset);
position_ += static_cast<std::uint64_t>(seek_offset);
if (static_cast<unsigned long>(seek_offset) == num_to_skip) { // NOLINT
@@ -117,6 +129,14 @@ Status FileReader::Skip(std::uint64_t num_to_skip,
}
}
+Status FileReader::Seek(std::uint64_t seek_position) {
+ if (FSEEK_(file_.get(), seek_position, SEEK_SET)) {
+ return Status(Status::kSeekFailed);
+ }
+ position_ = seek_position;
+ return Status(Status::kOkCompleted);
+}
+
std::uint64_t FileReader::Position() const { return position_; }
} // namespace webm